diff --git a/sdk/datafactory/azure-mgmt-datafactory/_meta.json b/sdk/datafactory/azure-mgmt-datafactory/_meta.json index cdb17c9e0768..31f5d386867d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/_meta.json +++ b/sdk/datafactory/azure-mgmt-datafactory/_meta.json @@ -1,11 +1,11 @@ { - "autorest": "3.4.5", + "autorest": "3.7.2", "use": [ - "@autorest/python@5.8.4", - "@autorest/modelerfour@4.19.2" + "@autorest/python@5.12.0", + "@autorest/modelerfour@4.19.3" ], - "commit": "9e8a591da83285d863866ecd2f6fe87a72758a7d", + "commit": "91c198e26991291a67ecb3aab163b255746d0b58", "repository_url": "https://github.com/Azure/azure-rest-api-specs", - "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.8.4 --use=@autorest/modelerfour@4.19.2 --version=3.4.5", + "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.12.0 --use=@autorest/modelerfour@4.19.3 --version=3.7.2", "readme": "specification/datafactory/resource-manager/readme.md" } \ No newline at end of file diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py index 0363a016fcc0..2586d071ab79 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py @@ -12,8 +12,7 @@ __version__ = VERSION __all__ = ['DataFactoryManagementClient'] -try: - from ._patch import patch_sdk # type: ignore - patch_sdk() -except ImportError: - pass +# `._patch.py` is used for handwritten extensions to the generated code +# Example: 
https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md +from ._patch import patch_sdk +patch_sdk() diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py index 79ca686fbd5b..3ad3848c7f2b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py @@ -10,7 +10,7 @@ from azure.core.configuration import Configuration from azure.core.pipeline import policies -from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy from ._version import VERSION @@ -40,11 +40,11 @@ def __init__( **kwargs # type: Any ): # type: (...) -> None + super(DataFactoryManagementClientConfiguration, self).__init__(**kwargs) if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: raise ValueError("Parameter 'subscription_id' must not be None.") - super(DataFactoryManagementClientConfiguration, self).__init__(**kwargs) self.credential = credential self.subscription_id = subscription_id @@ -68,4 +68,4 @@ def _configure( self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) self.authentication_policy = kwargs.get('authentication_policy') if self.credential and not self.authentication_policy: - self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) + self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py 
index 8d4043c2530d..a71ae9ce8e10 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py @@ -6,41 +6,22 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from copy import deepcopy from typing import TYPE_CHECKING from azure.mgmt.core import ARMPipelineClient from msrest import Deserializer, Serializer +from . import models +from ._configuration import DataFactoryManagementClientConfiguration +from .operations import ActivityRunsOperations, DataFlowDebugSessionOperations, DataFlowsOperations, DatasetsOperations, ExposureControlOperations, FactoriesOperations, IntegrationRuntimeNodesOperations, IntegrationRuntimeObjectMetadataOperations, IntegrationRuntimesOperations, LinkedServicesOperations, ManagedPrivateEndpointsOperations, ManagedVirtualNetworksOperations, Operations, PipelineRunsOperations, PipelinesOperations, PrivateEndPointConnectionsOperations, PrivateEndpointConnectionOperations, PrivateLinkResourcesOperations, TriggerRunsOperations, TriggersOperations + if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Optional from azure.core.credentials import TokenCredential - from azure.core.pipeline.transport import HttpRequest, HttpResponse - -from ._configuration import DataFactoryManagementClientConfiguration -from .operations import Operations -from .operations import FactoriesOperations -from .operations import ExposureControlOperations -from .operations import IntegrationRuntimesOperations -from .operations import IntegrationRuntimeObjectMetadataOperations -from .operations import IntegrationRuntimeNodesOperations -from .operations import LinkedServicesOperations -from .operations import DatasetsOperations -from .operations import PipelinesOperations 
-from .operations import PipelineRunsOperations -from .operations import ActivityRunsOperations -from .operations import TriggersOperations -from .operations import TriggerRunsOperations -from .operations import DataFlowsOperations -from .operations import DataFlowDebugSessionOperations -from .operations import ManagedVirtualNetworksOperations -from .operations import ManagedPrivateEndpointsOperations -from .operations import PrivateEndPointConnectionsOperations -from .operations import PrivateEndpointConnectionOperations -from .operations import PrivateLinkResourcesOperations -from . import models - + from azure.core.rest import HttpRequest, HttpResponse class DataFactoryManagementClient(object): """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. @@ -53,10 +34,13 @@ class DataFactoryManagementClient(object): :vartype exposure_control: azure.mgmt.datafactory.operations.ExposureControlOperations :ivar integration_runtimes: IntegrationRuntimesOperations operations :vartype integration_runtimes: azure.mgmt.datafactory.operations.IntegrationRuntimesOperations - :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadataOperations operations - :vartype integration_runtime_object_metadata: azure.mgmt.datafactory.operations.IntegrationRuntimeObjectMetadataOperations + :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadataOperations + operations + :vartype integration_runtime_object_metadata: + azure.mgmt.datafactory.operations.IntegrationRuntimeObjectMetadataOperations :ivar integration_runtime_nodes: IntegrationRuntimeNodesOperations operations - :vartype integration_runtime_nodes: azure.mgmt.datafactory.operations.IntegrationRuntimeNodesOperations + :vartype integration_runtime_nodes: + azure.mgmt.datafactory.operations.IntegrationRuntimeNodesOperations :ivar linked_services: LinkedServicesOperations operations :vartype linked_services: 
azure.mgmt.datafactory.operations.LinkedServicesOperations :ivar datasets: DatasetsOperations operations @@ -74,101 +58,96 @@ class DataFactoryManagementClient(object): :ivar data_flows: DataFlowsOperations operations :vartype data_flows: azure.mgmt.datafactory.operations.DataFlowsOperations :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations - :vartype data_flow_debug_session: azure.mgmt.datafactory.operations.DataFlowDebugSessionOperations + :vartype data_flow_debug_session: + azure.mgmt.datafactory.operations.DataFlowDebugSessionOperations :ivar managed_virtual_networks: ManagedVirtualNetworksOperations operations - :vartype managed_virtual_networks: azure.mgmt.datafactory.operations.ManagedVirtualNetworksOperations + :vartype managed_virtual_networks: + azure.mgmt.datafactory.operations.ManagedVirtualNetworksOperations :ivar managed_private_endpoints: ManagedPrivateEndpointsOperations operations - :vartype managed_private_endpoints: azure.mgmt.datafactory.operations.ManagedPrivateEndpointsOperations + :vartype managed_private_endpoints: + azure.mgmt.datafactory.operations.ManagedPrivateEndpointsOperations :ivar private_end_point_connections: PrivateEndPointConnectionsOperations operations - :vartype private_end_point_connections: azure.mgmt.datafactory.operations.PrivateEndPointConnectionsOperations + :vartype private_end_point_connections: + azure.mgmt.datafactory.operations.PrivateEndPointConnectionsOperations :ivar private_endpoint_connection: PrivateEndpointConnectionOperations operations - :vartype private_endpoint_connection: azure.mgmt.datafactory.operations.PrivateEndpointConnectionOperations + :vartype private_endpoint_connection: + azure.mgmt.datafactory.operations.PrivateEndpointConnectionOperations :ivar private_link_resources: PrivateLinkResourcesOperations operations - :vartype private_link_resources: azure.mgmt.datafactory.operations.PrivateLinkResourcesOperations + :vartype private_link_resources: + 
azure.mgmt.datafactory.operations.PrivateLinkResourcesOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The subscription identifier. :type subscription_id: str - :param str base_url: Service URL - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :param base_url: Service URL. Default value is 'https://management.azure.com'. + :type base_url: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. """ def __init__( self, credential, # type: "TokenCredential" subscription_id, # type: str - base_url=None, # type: Optional[str] + base_url="https://management.azure.com", # type: str **kwargs # type: Any ): # type: (...) -> None - if not base_url: - base_url = 'https://management.azure.com' - self._config = DataFactoryManagementClientConfiguration(credential, subscription_id, **kwargs) + self._config = DataFactoryManagementClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) - - self.operations = Operations( - self._client, self._config, self._serialize, self._deserialize) - self.factories = FactoriesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.exposure_control = ExposureControlOperations( - self._client, self._config, self._serialize, self._deserialize) - self.integration_runtimes = IntegrationRuntimesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime_object_metadata = 
IntegrationRuntimeObjectMetadataOperations( - self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime_nodes = IntegrationRuntimeNodesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.linked_services = LinkedServicesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.datasets = DatasetsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.pipelines = PipelinesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.pipeline_runs = PipelineRunsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.activity_runs = ActivityRunsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.triggers = TriggersOperations( - self._client, self._config, self._serialize, self._deserialize) - self.trigger_runs = TriggerRunsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.data_flows = DataFlowsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.data_flow_debug_session = DataFlowDebugSessionOperations( - self._client, self._config, self._serialize, self._deserialize) - self.managed_virtual_networks = ManagedVirtualNetworksOperations( - self._client, self._config, self._serialize, self._deserialize) - self.managed_private_endpoints = ManagedPrivateEndpointsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.private_end_point_connections = PrivateEndPointConnectionsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.private_endpoint_connection = PrivateEndpointConnectionOperations( - self._client, self._config, self._serialize, self._deserialize) - self.private_link_resources = PrivateLinkResourcesOperations( - self._client, self._config, self._serialize, self._deserialize) - - def _send_request(self, http_request, **kwargs): - # type: 
(HttpRequest, Any) -> HttpResponse + self._serialize.client_side_validation = False + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) + self.factories = FactoriesOperations(self._client, self._config, self._serialize, self._deserialize) + self.exposure_control = ExposureControlOperations(self._client, self._config, self._serialize, self._deserialize) + self.integration_runtimes = IntegrationRuntimesOperations(self._client, self._config, self._serialize, self._deserialize) + self.integration_runtime_object_metadata = IntegrationRuntimeObjectMetadataOperations(self._client, self._config, self._serialize, self._deserialize) + self.integration_runtime_nodes = IntegrationRuntimeNodesOperations(self._client, self._config, self._serialize, self._deserialize) + self.linked_services = LinkedServicesOperations(self._client, self._config, self._serialize, self._deserialize) + self.datasets = DatasetsOperations(self._client, self._config, self._serialize, self._deserialize) + self.pipelines = PipelinesOperations(self._client, self._config, self._serialize, self._deserialize) + self.pipeline_runs = PipelineRunsOperations(self._client, self._config, self._serialize, self._deserialize) + self.activity_runs = ActivityRunsOperations(self._client, self._config, self._serialize, self._deserialize) + self.triggers = TriggersOperations(self._client, self._config, self._serialize, self._deserialize) + self.trigger_runs = TriggerRunsOperations(self._client, self._config, self._serialize, self._deserialize) + self.data_flows = DataFlowsOperations(self._client, self._config, self._serialize, self._deserialize) + self.data_flow_debug_session = DataFlowDebugSessionOperations(self._client, self._config, self._serialize, self._deserialize) + self.managed_virtual_networks = ManagedVirtualNetworksOperations(self._client, self._config, self._serialize, self._deserialize) + self.managed_private_endpoints = ManagedPrivateEndpointsOperations(self._client, 
self._config, self._serialize, self._deserialize) + self.private_end_point_connections = PrivateEndPointConnectionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.private_endpoint_connection = PrivateEndpointConnectionOperations(self._client, self._config, self._serialize, self._deserialize) + self.private_link_resources = PrivateLinkResourcesOperations(self._client, self._config, self._serialize, self._deserialize) + + + def _send_request( + self, + request, # type: HttpRequest + **kwargs # type: Any + ): + # type: (...) -> HttpResponse """Runs the network request through the client's chained policies. - :param http_request: The network request you want to make. Required. - :type http_request: ~azure.core.pipeline.transport.HttpRequest - :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. :return: The response of your network call. Does not do error handling on your response. 
- :rtype: ~azure.core.pipeline.transport.HttpResponse + :rtype: ~azure.core.rest.HttpResponse """ - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - http_request.url = self._client.format_url(http_request.url, **path_format_arguments) - stream = kwargs.pop("stream", True) - pipeline_response = self._client._pipeline.run(http_request, stream=stream, **kwargs) - return pipeline_response.http_response + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) def close(self): # type: () -> None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_metadata.json b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_metadata.json index dd5e00735108..1dc0ed607431 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_metadata.json +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_metadata.json @@ -5,13 +5,13 @@ "name": "DataFactoryManagementClient", "filename": "_data_factory_management_client", "description": "The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services.", - "base_url": "\u0027https://management.azure.com\u0027", - "custom_base_url": null, + "host_value": "\"https://management.azure.com\"", + "parameterized_host_template": null, "azure_arm": true, "has_lro_operations": true, "client_side_validation": false, - "sync_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"ARMPipelineClient\"]}, \"local\": {\"._configuration\": 
[\"DataFactoryManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"HttpRequest\", \"HttpResponse\"]}}}", - "async_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"AsyncARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"DataFactoryManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"AsyncHttpResponse\", \"HttpRequest\"]}}}" + "sync_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"ARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"DataFactoryManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}}}", + "async_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"], \"azure.core.credentials\": [\"TokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"AsyncARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"DataFactoryManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}}}" }, "global_parameters": { "sync": { @@ -54,7 +54,7 @@ "required": false }, "base_url": 
{ - "signature": "base_url=None, # type: Optional[str]", + "signature": "base_url=\"https://management.azure.com\", # type: str", "description": "Service URL", "docstring_type": "str", "required": false @@ -74,7 +74,7 @@ "required": false }, "base_url": { - "signature": "base_url: Optional[str] = None,", + "signature": "base_url: str = \"https://management.azure.com\",", "description": "Service URL", "docstring_type": "str", "required": false @@ -91,11 +91,10 @@ "config": { "credential": true, "credential_scopes": ["https://management.azure.com/.default"], - "credential_default_policy_type": "BearerTokenCredentialPolicy", - "credential_default_policy_type_has_async_version": true, - "credential_key_header_name": null, - "sync_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\"._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}}", - "async_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\".._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}}" + "credential_call_sync": "ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)", + "credential_call_async": "AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)", + "sync_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMChallengeAuthenticationPolicy\", \"ARMHttpLoggingPolicy\"]}, \"local\": 
{\"._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}}", + "async_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\", \"AsyncARMChallengeAuthenticationPolicy\"]}, \"local\": {\".._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}}" }, "operation_groups": { "operations": "Operations", diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_patch.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_patch.py new file mode 100644 index 000000000000..74e48ecd07cf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_patch.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# This file is used for handwritten extensions to the generated code. Example: +# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md +def patch_sdk(): + pass \ No newline at end of file diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_vendor.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_vendor.py new file mode 100644 index 000000000000..138f663c53a4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_vendor.py @@ -0,0 +1,27 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.core.pipeline.transport import HttpRequest + +def _convert_request(request, files=None): + data = request.content if not files else None + request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) + if files: + request.set_formdata_body(files) + return request + +def _format_url_section(template, **kwargs): + components = template.split("/") + while components: + try: + return template.format(**kwargs) + except KeyError as key: + formatted_components = template.split("/") + components = [ + c for c in formatted_components if "{}".format(key.args[0]) not in c + ] + template = "/".join(components) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py index 83f24ab50946..c47f66669f1b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "2.1.0" +VERSION = "1.0.0" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/__init__.py index 571673cab5c8..b9490bca13bb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/__init__.py @@ -8,3 +8,8 @@ from ._data_factory_management_client import DataFactoryManagementClient __all__ = ['DataFactoryManagementClient'] + +# `._patch.py` is used for handwritten extensions to the generated code +# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md +from ._patch import patch_sdk +patch_sdk() diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_configuration.py index e540bdbfb3f2..8f00dd83cf71 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_configuration.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_configuration.py @@ -10,7 +10,7 @@ from azure.core.configuration import Configuration from azure.core.pipeline import policies -from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy from .._version import VERSION @@ -37,11 +37,11 @@ def __init__( subscription_id: str, **kwargs: Any ) -> None: + super(DataFactoryManagementClientConfiguration, self).__init__(**kwargs) if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: raise ValueError("Parameter 'subscription_id' must not be None.") - super(DataFactoryManagementClientConfiguration, self).__init__(**kwargs) self.credential = 
credential self.subscription_id = subscription_id @@ -64,4 +64,4 @@ def _configure( self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) self.authentication_policy = kwargs.get('authentication_policy') if self.credential and not self.authentication_policy: - self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) + self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py index b0adc1172540..8d7e965f0a71 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py @@ -6,41 +6,22 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Optional, TYPE_CHECKING +from copy import deepcopy +from typing import Any, Awaitable, Optional, TYPE_CHECKING -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.mgmt.core import AsyncARMPipelineClient from msrest import Deserializer, Serializer +from .. 
import models +from ._configuration import DataFactoryManagementClientConfiguration +from .operations import ActivityRunsOperations, DataFlowDebugSessionOperations, DataFlowsOperations, DatasetsOperations, ExposureControlOperations, FactoriesOperations, IntegrationRuntimeNodesOperations, IntegrationRuntimeObjectMetadataOperations, IntegrationRuntimesOperations, LinkedServicesOperations, ManagedPrivateEndpointsOperations, ManagedVirtualNetworksOperations, Operations, PipelineRunsOperations, PipelinesOperations, PrivateEndPointConnectionsOperations, PrivateEndpointConnectionOperations, PrivateLinkResourcesOperations, TriggerRunsOperations, TriggersOperations + if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -from ._configuration import DataFactoryManagementClientConfiguration -from .operations import Operations -from .operations import FactoriesOperations -from .operations import ExposureControlOperations -from .operations import IntegrationRuntimesOperations -from .operations import IntegrationRuntimeObjectMetadataOperations -from .operations import IntegrationRuntimeNodesOperations -from .operations import LinkedServicesOperations -from .operations import DatasetsOperations -from .operations import PipelinesOperations -from .operations import PipelineRunsOperations -from .operations import ActivityRunsOperations -from .operations import TriggersOperations -from .operations import TriggerRunsOperations -from .operations import DataFlowsOperations -from .operations import DataFlowDebugSessionOperations -from .operations import ManagedVirtualNetworksOperations -from .operations import ManagedPrivateEndpointsOperations -from .operations import PrivateEndPointConnectionsOperations -from .operations import PrivateEndpointConnectionOperations -from .operations import PrivateLinkResourcesOperations -from .. 
import models - - -class DataFactoryManagementClient(object): +class DataFactoryManagementClient: """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. :ivar operations: Operations operations @@ -50,11 +31,15 @@ class DataFactoryManagementClient(object): :ivar exposure_control: ExposureControlOperations operations :vartype exposure_control: azure.mgmt.datafactory.aio.operations.ExposureControlOperations :ivar integration_runtimes: IntegrationRuntimesOperations operations - :vartype integration_runtimes: azure.mgmt.datafactory.aio.operations.IntegrationRuntimesOperations - :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadataOperations operations - :vartype integration_runtime_object_metadata: azure.mgmt.datafactory.aio.operations.IntegrationRuntimeObjectMetadataOperations + :vartype integration_runtimes: + azure.mgmt.datafactory.aio.operations.IntegrationRuntimesOperations + :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadataOperations + operations + :vartype integration_runtime_object_metadata: + azure.mgmt.datafactory.aio.operations.IntegrationRuntimeObjectMetadataOperations :ivar integration_runtime_nodes: IntegrationRuntimeNodesOperations operations - :vartype integration_runtime_nodes: azure.mgmt.datafactory.aio.operations.IntegrationRuntimeNodesOperations + :vartype integration_runtime_nodes: + azure.mgmt.datafactory.aio.operations.IntegrationRuntimeNodesOperations :ivar linked_services: LinkedServicesOperations operations :vartype linked_services: azure.mgmt.datafactory.aio.operations.LinkedServicesOperations :ivar datasets: DatasetsOperations operations @@ -72,99 +57,94 @@ class DataFactoryManagementClient(object): :ivar data_flows: DataFlowsOperations operations :vartype data_flows: azure.mgmt.datafactory.aio.operations.DataFlowsOperations :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations - :vartype 
data_flow_debug_session: azure.mgmt.datafactory.aio.operations.DataFlowDebugSessionOperations + :vartype data_flow_debug_session: + azure.mgmt.datafactory.aio.operations.DataFlowDebugSessionOperations :ivar managed_virtual_networks: ManagedVirtualNetworksOperations operations - :vartype managed_virtual_networks: azure.mgmt.datafactory.aio.operations.ManagedVirtualNetworksOperations + :vartype managed_virtual_networks: + azure.mgmt.datafactory.aio.operations.ManagedVirtualNetworksOperations :ivar managed_private_endpoints: ManagedPrivateEndpointsOperations operations - :vartype managed_private_endpoints: azure.mgmt.datafactory.aio.operations.ManagedPrivateEndpointsOperations + :vartype managed_private_endpoints: + azure.mgmt.datafactory.aio.operations.ManagedPrivateEndpointsOperations :ivar private_end_point_connections: PrivateEndPointConnectionsOperations operations - :vartype private_end_point_connections: azure.mgmt.datafactory.aio.operations.PrivateEndPointConnectionsOperations + :vartype private_end_point_connections: + azure.mgmt.datafactory.aio.operations.PrivateEndPointConnectionsOperations :ivar private_endpoint_connection: PrivateEndpointConnectionOperations operations - :vartype private_endpoint_connection: azure.mgmt.datafactory.aio.operations.PrivateEndpointConnectionOperations + :vartype private_endpoint_connection: + azure.mgmt.datafactory.aio.operations.PrivateEndpointConnectionOperations :ivar private_link_resources: PrivateLinkResourcesOperations operations - :vartype private_link_resources: azure.mgmt.datafactory.aio.operations.PrivateLinkResourcesOperations + :vartype private_link_resources: + azure.mgmt.datafactory.aio.operations.PrivateLinkResourcesOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The subscription identifier. 
:type subscription_id: str - :param str base_url: Service URL - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :param base_url: Service URL. Default value is 'https://management.azure.com'. + :type base_url: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. """ def __init__( self, credential: "AsyncTokenCredential", subscription_id: str, - base_url: Optional[str] = None, + base_url: str = "https://management.azure.com", **kwargs: Any ) -> None: - if not base_url: - base_url = 'https://management.azure.com' - self._config = DataFactoryManagementClientConfiguration(credential, subscription_id, **kwargs) + self._config = DataFactoryManagementClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) + self.factories = FactoriesOperations(self._client, self._config, self._serialize, self._deserialize) + self.exposure_control = ExposureControlOperations(self._client, self._config, self._serialize, self._deserialize) + self.integration_runtimes = IntegrationRuntimesOperations(self._client, self._config, self._serialize, self._deserialize) + self.integration_runtime_object_metadata = IntegrationRuntimeObjectMetadataOperations(self._client, self._config, self._serialize, self._deserialize) + self.integration_runtime_nodes = IntegrationRuntimeNodesOperations(self._client, self._config, self._serialize, self._deserialize) + 
self.linked_services = LinkedServicesOperations(self._client, self._config, self._serialize, self._deserialize) + self.datasets = DatasetsOperations(self._client, self._config, self._serialize, self._deserialize) + self.pipelines = PipelinesOperations(self._client, self._config, self._serialize, self._deserialize) + self.pipeline_runs = PipelineRunsOperations(self._client, self._config, self._serialize, self._deserialize) + self.activity_runs = ActivityRunsOperations(self._client, self._config, self._serialize, self._deserialize) + self.triggers = TriggersOperations(self._client, self._config, self._serialize, self._deserialize) + self.trigger_runs = TriggerRunsOperations(self._client, self._config, self._serialize, self._deserialize) + self.data_flows = DataFlowsOperations(self._client, self._config, self._serialize, self._deserialize) + self.data_flow_debug_session = DataFlowDebugSessionOperations(self._client, self._config, self._serialize, self._deserialize) + self.managed_virtual_networks = ManagedVirtualNetworksOperations(self._client, self._config, self._serialize, self._deserialize) + self.managed_private_endpoints = ManagedPrivateEndpointsOperations(self._client, self._config, self._serialize, self._deserialize) + self.private_end_point_connections = PrivateEndPointConnectionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.private_endpoint_connection = PrivateEndpointConnectionOperations(self._client, self._config, self._serialize, self._deserialize) + self.private_link_resources = PrivateLinkResourcesOperations(self._client, self._config, self._serialize, self._deserialize) + - self.operations = Operations( - self._client, self._config, self._serialize, self._deserialize) - self.factories = FactoriesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.exposure_control = ExposureControlOperations( - self._client, self._config, self._serialize, self._deserialize) - self.integration_runtimes = 
IntegrationRuntimesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime_object_metadata = IntegrationRuntimeObjectMetadataOperations( - self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime_nodes = IntegrationRuntimeNodesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.linked_services = LinkedServicesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.datasets = DatasetsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.pipelines = PipelinesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.pipeline_runs = PipelineRunsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.activity_runs = ActivityRunsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.triggers = TriggersOperations( - self._client, self._config, self._serialize, self._deserialize) - self.trigger_runs = TriggerRunsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.data_flows = DataFlowsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.data_flow_debug_session = DataFlowDebugSessionOperations( - self._client, self._config, self._serialize, self._deserialize) - self.managed_virtual_networks = ManagedVirtualNetworksOperations( - self._client, self._config, self._serialize, self._deserialize) - self.managed_private_endpoints = ManagedPrivateEndpointsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.private_end_point_connections = PrivateEndPointConnectionsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.private_endpoint_connection = PrivateEndpointConnectionOperations( - self._client, self._config, self._serialize, self._deserialize) - self.private_link_resources = 
PrivateLinkResourcesOperations( - self._client, self._config, self._serialize, self._deserialize) - - async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse: + def _send_request( + self, + request: HttpRequest, + **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: """Runs the network request through the client's chained policies. - :param http_request: The network request you want to make. Required. - :type http_request: ~azure.core.pipeline.transport.HttpRequest - :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. :return: The response of your network call. Does not do error handling on your response. 
- :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse + :rtype: ~azure.core.rest.AsyncHttpResponse """ - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - http_request.url = self._client.format_url(http_request.url, **path_format_arguments) - stream = kwargs.pop("stream", True) - pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs) - return pipeline_response.http_response + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) async def close(self) -> None: await self._client.close() diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_patch.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_patch.py new file mode 100644 index 000000000000..74e48ecd07cf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_patch.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# This file is used for handwritten extensions to the generated code. Example: +# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md +def patch_sdk(): + pass \ No newline at end of file diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py index 218ae0a7fe2a..2e4c7770e9ee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py @@ -5,16 +5,20 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +import functools from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models - +from ..._vendor import _convert_request +from ...operations._activity_runs_operations import build_query_by_pipeline_run_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -40,6 +44,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace_async async def query_by_pipeline_run( self, resource_group_name: str, @@ -68,33 +73,23 @@ async def query_by_pipeline_run( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.query_by_pipeline_run.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, 
pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(filter_parameters, 'RunFilterParameters') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_query_by_pipeline_run_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + run_id=run_id, + content_type=content_type, + json=_json, + template_url=self.query_by_pipeline_run.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -108,4 +103,6 @@ async def query_by_pipeline_run( return cls(pipeline_response, deserialized, {}) return deserialized + query_by_pipeline_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns'} # type: ignore + diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py index db31ffe70764..810bc1a9598e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py @@ -5,19 +5,24 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models - +from ..._vendor import _convert_request +from ...operations._data_flow_debug_session_operations import build_add_data_flow_request, build_create_request_initial, build_delete_request, build_execute_command_request_initial, build_query_by_factory_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -55,32 +60,22 @@ async def _create_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + _json = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') + + request = build_create_request_initial( + subscription_id=self._config.subscription_id, + 
resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self._create_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -88,20 +83,24 @@ async def _create_initial( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - response_headers = {} deserialized = None + response_headers = {} if response.status_code == 200: deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response) if response.status_code == 202: response_headers['location']=self._deserialize('str', response.headers.get('location')) + if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore + + @distributed_trace_async async def begin_create( self, resource_group_name: str, @@ -119,15 +118,20 @@ async def begin_create( :type request: ~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. 
- Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either CreateDataFlowDebugSessionResponse or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CreateDataFlowDebugSessionResponse + or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.CreateDataFlowDebugSessionResponse"] lro_delay = kwargs.pop( 'polling_interval', @@ -139,27 +143,21 @@ async def begin_create( resource_group_name=resource_group_name, factory_name=factory_name, request=request, + content_type=content_type, cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = 
pipeline_response.http_response deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -171,8 +169,10 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore + @distributed_trace def query_by_factory( self, resource_group_name: str, @@ -186,8 +186,10 @@ def query_by_factory( :param factory_name: The factory name. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either QueryDataFlowDebugSessionsResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.QueryDataFlowDebugSessionsResponse] + :return: An iterator like instance of either QueryDataFlowDebugSessionsResponse or the result + of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.QueryDataFlowDebugSessionsResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.QueryDataFlowDebugSessionsResponse"] @@ -195,36 +197,33 @@ def query_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.post(url, query_parameters, header_parameters) + + 
request = build_query_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.query_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_query_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('QueryDataFlowDebugSessionsResponse', pipeline_response) + deserialized = self._deserialize("QueryDataFlowDebugSessionsResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -242,11 +241,13 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged( get_next, extract_data ) query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryDataFlowDebugSessions'} # type: ignore + @distributed_trace_async async def add_data_flow( self, resource_group_name: str, @@ -272,32 +273,22 @@ async def add_data_flow( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.add_data_flow.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", 
self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(request, 'DataFlowDebugPackage') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_add_data_flow_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.add_data_flow.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(request, 'DataFlowDebugPackage') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -311,8 +302,11 @@ async def add_data_flow( return cls(pipeline_response, deserialized, {}) return deserialized + add_data_flow.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/addDataFlowToDebugSession'} # type: ignore + + @distributed_trace_async async def delete( self, resource_group_name: str, @@ -338,32 +332,22 @@ async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -376,6 +360,7 @@ async def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession'} # type: ignore + async def _execute_command_initial( self, resource_group_name: str, @@ -388,32 +373,22 @@ async def _execute_command_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._execute_command_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - 
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + _json = self._serialize.body(request, 'DataFlowDebugCommandRequest') + + request = build_execute_command_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self._execute_command_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -421,20 +396,24 @@ async def _execute_command_initial( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - response_headers = {} deserialized = None + response_headers = {} if response.status_code == 200: deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response) if response.status_code == 202: response_headers['location']=self._deserialize('str', response.headers.get('location')) + if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + _execute_command_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} # type: ignore + + @distributed_trace_async async def begin_execute_command( self, resource_group_name: str, @@ -452,15 +431,20 @@ async def begin_execute_command( :type 
request: ~azure.mgmt.datafactory.models.DataFlowDebugCommandRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either DataFlowDebugCommandResponse or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either DataFlowDebugCommandResponse or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.DataFlowDebugCommandResponse"] lro_delay = kwargs.pop( 'polling_interval', @@ -472,27 +456,21 @@ async def begin_execute_command( resource_group_name=resource_group_name, factory_name=factory_name, request=request, + content_type=content_type, cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -504,4 +482,5 @@ def 
get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_execute_command.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py index e1ef45dc21e9..551aa713a8b5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py @@ -5,17 +5,22 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models - +from ..._vendor import _convert_request +from ...operations._data_flows_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_by_factory_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -41,6 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace_async async def create_or_update( self, resource_group_name: str, @@ -73,35 +79,24 @@ async def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - 
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(data_flow, 'DataFlowResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(data_flow, 'DataFlowResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + data_flow_name=data_flow_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -115,8 +110,11 @@ async def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore + + @distributed_trace_async async def get( self, resource_group_name: str, @@ -146,30 +144,19 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': 
self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + data_flow_name=data_flow_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -183,8 +170,11 @@ async def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore + + @distributed_trace_async async def delete( self, resource_group_name: str, @@ -210,28 +200,18 @@ async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError 
} error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + data_flow_name=data_flow_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -244,6 +224,8 @@ async def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore + + @distributed_trace def list_by_factory( self, resource_group_name: str, @@ 
-257,8 +239,10 @@ def list_by_factory( :param factory_name: The factory name. :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DataFlowListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.DataFlowListResponse] + :return: An iterator like instance of either DataFlowListResponse or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.DataFlowListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.DataFlowListResponse"] @@ -266,36 +250,33 @@ def list_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + 
request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('DataFlowListResponse', pipeline_response) + deserialized = self._deserialize("DataFlowListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -313,6 +294,7 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged( get_next, extract_data ) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py index 4e5c8c3a5b7e..2162b0a22480 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py @@ -5,17 +5,22 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models - +from ..._vendor import _convert_request +from ...operations._datasets_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_by_factory_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -41,6 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace def list_by_factory( self, resource_group_name: str, @@ -55,7 +61,8 @@ def list_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DatasetListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.DatasetListResponse] + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.DatasetListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.DatasetListResponse"] @@ -63,36 +70,33 @@ def list_by_factory( 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request 
async def extract_data(pipeline_response): - deserialized = self._deserialize('DatasetListResponse', pipeline_response) + deserialized = self._deserialize("DatasetListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -110,11 +114,13 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged( get_next, extract_data ) list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets'} # type: ignore + @distributed_trace_async async def create_or_update( self, resource_group_name: str, @@ -147,35 +153,24 @@ async def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - 
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(dataset, 'DatasetResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(dataset, 'DatasetResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + dataset_name=dataset_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -189,8 +184,11 @@ async def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore + + @distributed_trace_async async def get( self, resource_group_name: str, @@ -220,30 +218,19 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': 
self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + dataset_name=dataset_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -259,8 +246,11 @@ async def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore + + @distributed_trace_async async def delete( self, resource_group_name: str, @@ -286,28 +276,18 @@ 
async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + dataset_name=dataset_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -319,3 +299,4 @@ async def delete( return cls(pipeline_response, None, {}) delete.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py index 9ea521937af2..e12ccbc0dd76 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py @@ -5,16 +5,20 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models - +from ..._vendor import _convert_request +from ...operations._exposure_control_operations import build_get_feature_value_by_factory_request, build_get_feature_value_request, build_query_feature_values_by_factory_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -40,6 +44,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace_async async def get_feature_value( self, location_id: str, @@ -62,31 +67,21 @@ async def get_feature_value( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.get_feature_value.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'locationId': self._serialize.url("location_id", location_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + _json = self._serialize.body(exposure_control_request, 'ExposureControlRequest') + + request = build_get_feature_value_request( + subscription_id=self._config.subscription_id, + location_id=location_id, + content_type=content_type, + 
json=_json, + template_url=self.get_feature_value.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -100,8 +95,11 @@ async def get_feature_value( return cls(pipeline_response, deserialized, {}) return deserialized + get_feature_value.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue'} # type: ignore + + @distributed_trace_async async def get_feature_value_by_factory( self, resource_group_name: str, @@ -127,32 +125,22 @@ async def get_feature_value_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.get_feature_value_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = 
self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(exposure_control_request, 'ExposureControlRequest') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_get_feature_value_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.get_feature_value_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -166,8 +154,11 @@ async def get_feature_value_by_factory( return cls(pipeline_response, deserialized, {}) return deserialized + get_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue'} # type: ignore + + @distributed_trace_async async def query_feature_values_by_factory( self, resource_group_name: str, @@ -182,7 +173,8 @@ async def query_feature_values_by_factory( :param factory_name: The factory name. :type factory_name: str :param exposure_control_batch_request: The exposure control request for list of features. 
- :type exposure_control_batch_request: ~azure.mgmt.datafactory.models.ExposureControlBatchRequest + :type exposure_control_batch_request: + ~azure.mgmt.datafactory.models.ExposureControlBatchRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ExposureControlBatchResponse, or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.ExposureControlBatchResponse @@ -193,32 +185,22 @@ async def query_feature_values_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.query_feature_values_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + _json = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest') + + request = 
build_query_feature_values_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.query_feature_values_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -232,4 +214,6 @@ async def query_feature_values_by_factory( return cls(pipeline_response, deserialized, {}) return deserialized + query_feature_values_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py index 04705d6abc97..acd597de0a7b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py @@ -5,17 +5,22 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models - +from ..._vendor import _convert_request +from ...operations._factories_operations import build_configure_factory_repo_request, build_create_or_update_request, build_delete_request, build_get_data_plane_access_request, build_get_git_hub_access_token_request, build_get_request, build_list_by_resource_group_request, build_list_request, build_update_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -41,6 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace def list( self, **kwargs: Any @@ -49,7 +55,8 @@ def list( :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.FactoryListResponse] + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.FactoryListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = 
kwargs.pop('cls', None) # type: ClsType["_models.FactoryListResponse"] @@ -57,34 +64,29 @@ def list( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + template_url=self.list.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('FactoryListResponse', pipeline_response) + deserialized = self._deserialize("FactoryListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -102,11 +104,13 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged( 
get_next, extract_data ) list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories'} # type: ignore + @distributed_trace_async async def configure_factory_repo( self, location_id: str, @@ -129,31 +133,21 @@ async def configure_factory_repo( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.configure_factory_repo.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'locationId': self._serialize.url("location_id", location_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + _json = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') + + request = build_configure_factory_repo_request( + subscription_id=self._config.subscription_id, + location_id=location_id, + content_type=content_type, + json=_json, + template_url=self.configure_factory_repo.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') - body_content_kwargs['content'] = body_content - request = 
self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -167,8 +161,11 @@ async def configure_factory_repo( return cls(pipeline_response, deserialized, {}) return deserialized + configure_factory_repo.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo'} # type: ignore + + @distributed_trace def list_by_resource_group( self, resource_group_name: str, @@ -180,7 +177,8 @@ def list_by_resource_group( :type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.FactoryListResponse] + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.FactoryListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FactoryListResponse"] @@ -188,35 +186,31 @@ def list_by_resource_group( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_resource_group.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, 
pattern=r'^[-\w\._\(\)]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + template_url=self.list_by_resource_group.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('FactoryListResponse', pipeline_response) + deserialized = self._deserialize("FactoryListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -234,11 +228,13 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged( get_next, extract_data ) list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories'} # type: ignore + @distributed_trace_async async def create_or_update( self, resource_group_name: str, @@ -268,34 +264,23 @@ async def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", 
"application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory, 'Factory') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(factory, 'Factory') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await 
self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -309,8 +294,11 @@ async def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore + + @distributed_trace_async async def update( self, resource_group_name: str, @@ -336,32 +324,22 @@ async def update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + _json = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') + + request = build_update_request( + 
subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -375,8 +353,11 @@ async def update( return cls(pipeline_response, deserialized, {}) return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore + + @distributed_trace_async async def get( self, resource_group_name: str, @@ -403,29 +384,18 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = 
self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -441,8 +411,11 @@ async def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore + + @distributed_trace_async async def delete( self, resource_group_name: str, @@ -465,27 +438,17 @@ async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, 
**path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -498,6 +461,8 @@ async def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore + + @distributed_trace_async async def get_git_hub_access_token( self, resource_group_name: str, @@ -523,32 +488,22 @@ async def get_git_hub_access_token( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.get_git_hub_access_token.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = 
self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_get_git_hub_access_token_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.get_git_hub_access_token.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -562,8 +517,11 @@ async def get_git_hub_access_token( return cls(pipeline_response, deserialized, {}) return deserialized + get_git_hub_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken'} # type: ignore + + @distributed_trace_async async def get_data_plane_access( self, resource_group_name: str, @@ -589,32 +547,22 @@ async def get_data_plane_access( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: 
ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.get_data_plane_access.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(policy, 'UserAccessPolicy') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_get_data_plane_access_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.get_data_plane_access.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(policy, 'UserAccessPolicy') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await 
self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -628,4 +576,6 @@ async def get_data_plane_access( return cls(pipeline_response, deserialized, {}) return deserialized + get_data_plane_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py index ad2c99d99edb..14e1dad210e9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py @@ -5,16 +5,20 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models - +from ..._vendor import _convert_request +from ...operations._integration_runtime_nodes_operations import build_delete_request, build_get_ip_address_request, build_get_request, build_update_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -40,6 +44,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace_async async def get( self, resource_group_name: str, @@ -68,29 +73,19 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + 
factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + node_name=node_name, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -104,8 +99,11 @@ async def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore + + @distributed_trace_async async def delete( self, resource_group_name: str, @@ -134,29 +132,19 @@ async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', 
max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + node_name=node_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -169,6 +157,8 @@ async def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore + + @distributed_trace_async async def update( self, resource_group_name: str, @@ -190,7 +180,8 @@ async def update( :type node_name: str :param update_integration_runtime_node_request: The parameters for updating an integration runtime node. 
- :type update_integration_runtime_node_request: ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeNodeRequest + :type update_integration_runtime_node_request: + ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeNodeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode @@ -201,34 +192,24 @@ async def update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(update_integration_runtime_node_request, 
'UpdateIntegrationRuntimeNodeRequest') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + node_name=node_name, + content_type=content_type, + json=_json, + template_url=self.update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -242,8 +223,11 @@ async def update( return cls(pipeline_response, deserialized, {}) return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore + + @distributed_trace_async async def get_ip_address( self, resource_group_name: str, @@ -272,29 +256,19 @@ async def get_ip_address( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get_ip_address.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", 
self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_ip_address_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + node_name=node_name, + template_url=self.get_ip_address.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -308,4 +282,6 @@ async def get_ip_address( return cls(pipeline_response, deserialized, {}) return deserialized + get_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress'} # type: ignore 
+ diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py index c121228898d3..3086e8dfae2e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py @@ -5,18 +5,22 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models - +from ..._vendor import _convert_request +from ...operations._integration_runtime_object_metadata_operations import build_get_request, build_refresh_request_initial T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -54,28 +58,18 @@ async def _refresh_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self._refresh_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_refresh_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self._refresh_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -91,8 +85,11 @@ async def _refresh_initial( return cls(pipeline_response, deserialized, {}) return deserialized + _refresh_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore + + @distributed_trace_async async def begin_refresh( self, resource_group_name: str, @@ -110,15 +107,19 @@ async def begin_refresh( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either SsisObjectMetadataStatusResponse or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.SsisObjectMetadataStatusResponse] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either SsisObjectMetadataStatusResponse or + the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.SsisObjectMetadataStatusResponse] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.SsisObjectMetadataStatusResponse"] lro_delay = kwargs.pop( 'polling_interval', @@ -133,25 +134,17 @@ async def begin_refresh( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('SsisObjectMetadataStatusResponse', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -163,8 +156,10 @@ def 
get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_refresh.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore + @distributed_trace_async async def get( self, resource_group_name: str, @@ -194,36 +189,26 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: 
Optional[str] - body_content_kwargs = {} # type: Dict[str, Any] if get_metadata_request is not None: - body_content = self._serialize.body(get_metadata_request, 'GetSsisObjectMetadataRequest') + _json = self._serialize.body(get_metadata_request, 'GetSsisObjectMetadataRequest') else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + _json = None + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + content_type=content_type, + json=_json, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -237,4 +222,6 @@ async def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getObjectMetadata'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py index 19a13bbaf567..119fa3c30770 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py @@ -5,19 +5,24 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models - +from ..._vendor import _convert_request +from ...operations._integration_runtimes_operations import build_create_linked_integration_runtime_request, build_create_or_update_request, build_delete_request, build_get_connection_info_request, build_get_monitoring_data_request, build_get_request, build_get_status_request, build_list_auth_keys_request, build_list_by_factory_request, build_list_outbound_network_dependencies_endpoints_request, build_regenerate_auth_key_request, build_remove_links_request, build_start_request_initial, build_stop_request_initial, build_sync_credentials_request, build_update_request, build_upgrade_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -43,6 +48,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace def list_by_factory( self, resource_group_name: str, @@ -56,8 +62,10 @@ def 
list_by_factory( :param factory_name: The factory name. :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either IntegrationRuntimeListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.IntegrationRuntimeListResponse] + :return: An iterator like instance of either IntegrationRuntimeListResponse or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.IntegrationRuntimeListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.IntegrationRuntimeListResponse"] @@ -65,36 +73,33 @@ def list_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, 
query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -112,11 +117,13 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged( get_next, extract_data ) list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} # type: ignore + @distributed_trace_async async def create_or_update( self, resource_group_name: str, @@ -149,35 +156,24 @@ async def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': 
self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -191,8 +187,11 @@ async def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + @distributed_trace_async async def get( self, resource_group_name: str, @@ -223,30 +222,19 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - 
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -262,8 +250,11 @@ async def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + @distributed_trace_async async def update( self, resource_group_name: str, @@ -281,7 +272,8 @@ async def update( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :param update_integration_runtime_request: The parameters for updating an integration runtime. 
- :type update_integration_runtime_request: ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeRequest + :type update_integration_runtime_request: + ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeResource, or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource @@ -292,33 +284,23 @@ async def update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", 
content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + content_type=content_type, + json=_json, + template_url=self.update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -332,8 +314,11 @@ async def update( return cls(pipeline_response, deserialized, {}) return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + @distributed_trace_async async def delete( self, resource_group_name: str, @@ -359,28 +344,18 @@ async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', 
max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -393,6 +368,8 @@ async def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + @distributed_trace_async async def get_status( self, resource_group_name: str, @@ -418,28 +395,18 @@ async def get_status( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get_status.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': 
self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_status_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self.get_status.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -453,8 +420,11 @@ async def get_status( return cls(pipeline_response, deserialized, {}) return deserialized + get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore + + @distributed_trace_async async def list_outbound_network_dependencies_endpoints( self, resource_group_name: str, @@ -471,8 +441,10 @@ async def list_outbound_network_dependencies_endpoints( :param integration_runtime_name: The integration runtime name. 
:type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, or the result of cls(response) - :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse + :return: IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, or the result of + cls(response) + :rtype: + ~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse"] @@ -480,28 +452,18 @@ async def list_outbound_network_dependencies_endpoints( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.list_outbound_network_dependencies_endpoints.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters 
= {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_list_outbound_network_dependencies_endpoints_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self.list_outbound_network_dependencies_endpoints.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -515,8 +477,11 @@ async def list_outbound_network_dependencies_endpoints( return cls(pipeline_response, deserialized, {}) return deserialized + list_outbound_network_dependencies_endpoints.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/outboundNetworkDependenciesEndpoints'} # type: ignore + + @distributed_trace_async async def get_connection_info( self, resource_group_name: str, @@ -543,28 +508,18 @@ async def get_connection_info( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get_connection_info.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, 
pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_connection_info_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self.get_connection_info.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -578,8 +533,11 @@ async def get_connection_info( return cls(pipeline_response, deserialized, {}) return deserialized + get_connection_info.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo'} # type: ignore + + @distributed_trace_async async def regenerate_auth_key( self, resource_group_name: str, @@ -598,7 +556,8 @@ async def regenerate_auth_key( :type integration_runtime_name: str :param regenerate_key_parameters: The parameters for regenerating integration runtime authentication key. 
- :type regenerate_key_parameters: ~azure.mgmt.datafactory.models.IntegrationRuntimeRegenerateKeyParameters + :type regenerate_key_parameters: + ~azure.mgmt.datafactory.models.IntegrationRuntimeRegenerateKeyParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeAuthKeys, or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys @@ -609,33 +568,23 @@ async def regenerate_auth_key( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.regenerate_auth_key.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = 
self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_regenerate_auth_key_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + content_type=content_type, + json=_json, + template_url=self.regenerate_auth_key.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -649,8 +598,11 @@ async def regenerate_auth_key( return cls(pipeline_response, deserialized, {}) return deserialized + regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} # type: ignore + + @distributed_trace_async async def list_auth_keys( self, resource_group_name: str, @@ -676,28 +628,18 @@ async def list_auth_keys( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.list_auth_keys.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', 
max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_list_auth_keys_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self.list_auth_keys.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -711,8 +653,10 @@ async def list_auth_keys( return cls(pipeline_response, deserialized, {}) return deserialized + list_auth_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore + async def _start_initial( self, resource_group_name: str, @@ -725,28 +669,18 @@ async def _start_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # 
Construct URL - url = self._start_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_start_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self._start_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -762,8 +696,11 @@ async def _start_initial( return cls(pipeline_response, deserialized, {}) return deserialized + _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore + + 
@distributed_trace_async async def begin_start( self, resource_group_name: str, @@ -781,15 +718,19 @@ async def begin_start( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either IntegrationRuntimeStatusResponse or + the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.IntegrationRuntimeStatusResponse"] lro_delay = kwargs.pop( 'polling_interval', @@ -804,25 +745,17 @@ async def begin_start( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -834,6 +767,7 @@ def 
get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore async def _stop_initial( @@ -848,28 +782,18 @@ async def _stop_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self._stop_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_stop_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self._stop_initial.metadata['url'], + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -882,6 +806,8 @@ async def _stop_initial( _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore + + @distributed_trace_async async def begin_stop( self, resource_group_name: str, @@ -899,15 +825,17 @@ async def begin_stop( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', @@ -922,22 +850,14 @@ async def begin_stop( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -949,8 +869,10 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_stop.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore + @distributed_trace_async async def sync_credentials( self, resource_group_name: str, @@ -979,28 +901,18 @@ async def sync_credentials( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.sync_credentials.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_sync_credentials_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self.sync_credentials.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = 
self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -1013,6 +925,8 @@ async def sync_credentials( sync_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials'} # type: ignore + + @distributed_trace_async async def get_monitoring_data( self, resource_group_name: str, @@ -1039,28 +953,18 @@ async def get_monitoring_data( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get_monitoring_data.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_monitoring_data_request( + 
subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self.get_monitoring_data.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -1074,8 +978,11 @@ async def get_monitoring_data( return cls(pipeline_response, deserialized, {}) return deserialized + get_monitoring_data.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData'} # type: ignore + + @distributed_trace_async async def upgrade( self, resource_group_name: str, @@ -1101,28 +1008,18 @@ async def upgrade( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.upgrade.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - 
query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_upgrade_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self.upgrade.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -1135,6 +1032,8 @@ async def upgrade( upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'} # type: ignore + + @distributed_trace_async async def remove_links( self, resource_group_name: str, @@ -1154,7 +1053,8 @@ async def remove_links( :type integration_runtime_name: str :param linked_integration_runtime_request: The data factory name for the linked integration runtime. 
- :type linked_integration_runtime_request: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeRequest + :type linked_integration_runtime_request: + ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None @@ -1165,33 +1065,23 @@ async def remove_links( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.remove_links.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + _json = self._serialize.body(linked_integration_runtime_request, 
'LinkedIntegrationRuntimeRequest') + + request = build_remove_links_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + content_type=content_type, + json=_json, + template_url=self.remove_links.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -1204,6 +1094,8 @@ async def remove_links( remove_links.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore + + @distributed_trace_async async def create_linked_integration_runtime( self, resource_group_name: str, @@ -1221,7 +1113,8 @@ async def create_linked_integration_runtime( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :param create_linked_integration_runtime_request: The linked integration runtime properties. 
- :type create_linked_integration_runtime_request: ~azure.mgmt.datafactory.models.CreateLinkedIntegrationRuntimeRequest + :type create_linked_integration_runtime_request: + ~azure.mgmt.datafactory.models.CreateLinkedIntegrationRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeStatusResponse, or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse @@ -1232,33 +1125,23 @@ async def create_linked_integration_runtime( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_linked_integration_runtime.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') - # Construct headers - header_parameters 
= {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_create_linked_integration_runtime_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + content_type=content_type, + json=_json, + template_url=self.create_linked_integration_runtime.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -1272,4 +1155,6 @@ async def create_linked_integration_runtime( return cls(pipeline_response, deserialized, {}) return deserialized + create_linked_integration_runtime.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py index d174417dc70e..84f14e2a4e0c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py @@ 
-5,17 +5,22 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models - +from ..._vendor import _convert_request +from ...operations._linked_services_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_by_factory_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -41,6 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace def list_by_factory( self, resource_group_name: str, @@ -54,8 +60,10 @@ def list_by_factory( :param factory_name: The factory name. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either LinkedServiceListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.LinkedServiceListResponse] + :return: An iterator like instance of either LinkedServiceListResponse or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.LinkedServiceListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.LinkedServiceListResponse"] @@ -63,36 +71,33 @@ def list_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + 
subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('LinkedServiceListResponse', pipeline_response) + deserialized = self._deserialize("LinkedServiceListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -110,11 +115,13 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged( get_next, extract_data ) list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices'} # type: ignore + @distributed_trace_async async def create_or_update( self, resource_group_name: str, @@ -147,35 +154,24 @@ async def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': 
self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(linked_service, 'LinkedServiceResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(linked_service, 'LinkedServiceResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + linked_service_name=linked_service_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = 
pipeline_response.http_response @@ -189,8 +185,11 @@ async def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore + + @distributed_trace_async async def get( self, resource_group_name: str, @@ -221,30 +220,19 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + 
factory_name=factory_name, + linked_service_name=linked_service_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -260,8 +248,11 @@ async def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore + + @distributed_trace_async async def delete( self, resource_group_name: str, @@ -287,28 +278,18 @@ async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - 
header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + linked_service_name=linked_service_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -320,3 +301,4 @@ async def delete( return cls(pipeline_response, None, {}) delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py index 944fbb103654..b19b8d31b4a1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py @@ -5,17 +5,22 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models - +from ..._vendor import _convert_request +from ...operations._managed_private_endpoints_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_by_factory_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -41,6 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace def list_by_factory( self, resource_group_name: str, @@ -57,8 +63,10 @@ def list_by_factory( :param managed_virtual_network_name: Managed virtual network name. 
:type managed_virtual_network_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.ManagedPrivateEndpointListResponse] + :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result + of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.ManagedPrivateEndpointListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagedPrivateEndpointListResponse"] @@ -66,37 +74,35 @@ def list_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - # 
Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('ManagedPrivateEndpointListResponse', pipeline_response) + deserialized = self._deserialize("ManagedPrivateEndpointListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -114,11 +120,13 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged( get_next, extract_data ) list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints'} # type: ignore + @distributed_trace_async async def create_or_update( self, resource_group_name: str, @@ -154,36 +162,25 @@ async def create_or_update( 401: ClientAuthenticationError, 404: 
ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') - body_content_kwargs['content'] = body_content - request 
= self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + managed_private_endpoint_name=managed_private_endpoint_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -197,8 +194,11 @@ async def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore + + @distributed_trace_async async def get( self, resource_group_name: str, @@ -232,31 +232,20 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': 
self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + managed_private_endpoint_name=managed_private_endpoint_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -270,8 +259,11 @@ async def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore + + @distributed_trace_async async def delete( self, resource_group_name: str, @@ -300,29 +292,19 @@ async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + 
resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + managed_private_endpoint_name=managed_private_endpoint_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -334,3 +316,4 @@ async def delete( return cls(pipeline_response, None, {}) delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py index 4a760a88b537..b10bcefd517b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py @@ -5,17 +5,22 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models - +from ..._vendor import _convert_request +from ...operations._managed_virtual_networks_operations import build_create_or_update_request, build_get_request, build_list_by_factory_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -41,6 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace def list_by_factory( self, resource_group_name: str, @@ -54,8 +60,10 @@ def list_by_factory( :param factory_name: The factory name. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.ManagedVirtualNetworkListResponse] + :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.ManagedVirtualNetworkListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagedVirtualNetworkListResponse"] @@ -63,36 +71,33 @@ def list_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = 
build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('ManagedVirtualNetworkListResponse', pipeline_response) + deserialized = self._deserialize("ManagedVirtualNetworkListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -110,11 +115,13 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged( get_next, extract_data ) list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks'} # type: ignore + @distributed_trace_async async def create_or_update( self, resource_group_name: str, @@ -147,35 +154,24 @@ async def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", 
self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -189,8 +185,11 @@ async def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore + + @distributed_trace_async async def get( self, resource_group_name: str, @@ -221,30 +220,19 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = 
self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -258,4 +246,6 @@ async def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py index a829cafd0bfa..b3d9816cc57b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py @@ -5,17 +5,22 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models - +from ..._vendor import _convert_request +from ...operations._operations import build_list_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -41,6 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace def list( self, **kwargs: Any @@ -48,8 +54,10 @@ def list( """Lists the available Azure Data Factory API operations. 
:keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OperationListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.OperationListResponse] + :return: An iterator like instance of either OperationListResponse or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.OperationListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResponse"] @@ -57,30 +65,27 @@ def list( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = build_list_request( + template_url=self.list.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('OperationListResponse', pipeline_response) + 
deserialized = self._deserialize("OperationListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -98,6 +103,7 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged( get_next, extract_data ) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py index a6c0ff7add0a..654a763c8318 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py @@ -5,16 +5,20 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models - +from ..._vendor import _convert_request +from ...operations._pipeline_runs_operations import build_cancel_request, build_get_request, build_query_by_factory_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -40,6 +44,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace_async async def query_by_factory( self, resource_group_name: str, @@ -65,32 +70,22 @@ async def query_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(filter_parameters, 'RunFilterParameters') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", 
accept, 'str') + request = build_query_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.query_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -104,8 +99,11 @@ async def query_by_factory( return cls(pipeline_response, deserialized, {}) return deserialized + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns'} # type: ignore + + @distributed_trace_async async def get( self, resource_group_name: str, @@ -131,28 +129,18 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, 
**path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + run_id=run_id, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -166,8 +154,11 @@ async def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}'} # type: ignore + + @distributed_trace_async async def cancel( self, resource_group_name: str, @@ -197,30 +188,19 @@ async def cancel( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.cancel.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': 
self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if is_recursive is not None: - query_parameters['isRecursive'] = self._serialize.query("is_recursive", is_recursive, 'bool') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_cancel_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + run_id=run_id, + is_recursive=is_recursive, + template_url=self.cancel.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -232,3 +212,4 @@ async def cancel( return cls(pipeline_response, None, {}) cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py index fb22ca9c161e..9eb1286f4520 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py @@ -5,17 +5,22 @@ # Code generated by Microsoft (R) AutoRest Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models - +from ..._vendor import _convert_request +from ...operations._pipelines_operations import build_create_or_update_request, build_create_run_request, build_delete_request, build_get_request, build_list_by_factory_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -41,6 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace def list_by_factory( self, resource_group_name: str, @@ -54,8 +60,10 @@ def list_by_factory( :param factory_name: The factory name. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PipelineListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.PipelineListResponse] + :return: An iterator like instance of either PipelineListResponse or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.PipelineListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.PipelineListResponse"] @@ -63,36 +71,33 @@ def list_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + 
subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('PipelineListResponse', pipeline_response) + deserialized = self._deserialize("PipelineListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -110,11 +115,13 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged( get_next, extract_data ) list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines'} # type: ignore + @distributed_trace_async async def create_or_update( self, resource_group_name: str, @@ -147,35 +154,24 @@ async def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': 
self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(pipeline, 'PipelineResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(pipeline, 'PipelineResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + pipeline_name=pipeline_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -189,8 +185,11 
@@ async def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore + + @distributed_trace_async async def get( self, resource_group_name: str, @@ -220,30 +219,19 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + pipeline_name=pipeline_name, + if_none_match=if_none_match, + 
template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -259,8 +247,11 @@ async def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore + + @distributed_trace_async async def delete( self, resource_group_name: str, @@ -286,28 +277,18 @@ async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = 
build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + pipeline_name=pipeline_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -320,6 +301,8 @@ async def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore + + @distributed_trace_async async def create_run( self, resource_group_name: str, @@ -365,44 +348,30 @@ async def create_run( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_run.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = 
self._serialize.query("api_version", api_version, 'str') - if reference_pipeline_run_id is not None: - query_parameters['referencePipelineRunId'] = self._serialize.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str') - if is_recovery is not None: - query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool') - if start_activity_name is not None: - query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str') - if start_from_failure is not None: - query_parameters['startFromFailure'] = self._serialize.query("start_from_failure", start_from_failure, 'bool') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + if parameters is not None: - body_content = self._serialize.body(parameters, '{object}') + _json = self._serialize.body(parameters, '{object}') else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + _json = None + + request = build_create_run_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + pipeline_name=pipeline_name, + content_type=content_type, + json=_json, + reference_pipeline_run_id=reference_pipeline_run_id, + is_recovery=is_recovery, + start_activity_name=start_activity_name, + start_from_failure=start_from_failure, + template_url=self.create_run.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, 
stream=False, **kwargs) response = pipeline_response.http_response @@ -416,4 +385,6 @@ async def create_run( return cls(pipeline_response, deserialized, {}) return deserialized + create_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py index e49a3ca3e19e..f0ce8fe622e4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py @@ -5,17 +5,22 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models - +from ..._vendor import _convert_request +from ...operations._private_end_point_connections_operations import build_list_by_factory_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -41,6 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace def list_by_factory( self, resource_group_name: str, @@ -54,8 +60,10 @@ def list_by_factory( :param factory_name: The factory name. :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PrivateEndpointConnectionListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.PrivateEndpointConnectionListResponse] + :return: An iterator like instance of either PrivateEndpointConnectionListResponse or the + result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.PrivateEndpointConnectionListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionListResponse"] @@ -63,36 +71,33 @@ def list_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('PrivateEndpointConnectionListResponse', pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnectionListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -110,6 +115,7 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged( get_next, extract_data ) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py index db02fd36b659..49294cd84f8b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py @@ -5,16 +5,20 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models - +from ..._vendor import _convert_request +from ...operations._private_endpoint_connection_operations import build_create_or_update_request, build_delete_request, build_get_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -40,6 +44,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace_async async def create_or_update( self, resource_group_name: str, @@ -58,7 +63,8 @@ async def create_or_update( :param private_endpoint_connection_name: The private endpoint connection name. 
:type private_endpoint_connection_name: str :param private_endpoint_wrapper: - :type private_endpoint_wrapper: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequestResource + :type private_endpoint_wrapper: + ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequestResource :param if_match: ETag of the private endpoint connection entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str @@ -72,35 +78,24 @@ async def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - 
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(private_endpoint_wrapper, 'PrivateLinkConnectionApprovalRequestResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(private_endpoint_wrapper, 'PrivateLinkConnectionApprovalRequestResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + private_endpoint_connection_name=private_endpoint_connection_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -114,8 +109,11 @@ async def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + @distributed_trace_async async def get( self, resource_group_name: str, @@ -146,30 +144,19 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", 
self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + private_endpoint_connection_name=private_endpoint_connection_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -183,8 +170,11 @@ async def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + @distributed_trace_async async def delete( self, resource_group_name: str, @@ -210,28 
+200,18 @@ async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + private_endpoint_connection_name=private_endpoint_connection_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -243,3 +223,4 @@ async def delete( return cls(pipeline_response, None, {}) delete.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py index 17f1abad0051..90e0bd735062 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py @@ -5,16 +5,20 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models - +from ..._vendor import _convert_request +from ...operations._private_link_resources_operations import build_get_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -40,6 +44,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace_async async def get( self, resource_group_name: str, @@ -62,27 +67,17 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await 
self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -96,4 +91,6 @@ async def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateLinkResources'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py index c43e180febb9..dd9027a0e838 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py @@ -5,16 +5,20 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models - +from ..._vendor import _convert_request +from ...operations._trigger_runs_operations import build_cancel_request, build_query_by_factory_request, build_rerun_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -40,6 +44,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace_async async def rerun( self, resource_group_name: str, @@ -68,29 +73,19 @@ async def rerun( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.rerun.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_rerun_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + 
factory_name=factory_name, + trigger_name=trigger_name, + run_id=run_id, + template_url=self.rerun.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -103,6 +98,8 @@ async def rerun( rerun.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore + + @distributed_trace_async async def cancel( self, resource_group_name: str, @@ -131,29 +128,19 @@ async def cancel( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.cancel.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - 
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_cancel_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + run_id=run_id, + template_url=self.cancel.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -166,6 +153,8 @@ async def cancel( cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore + + @distributed_trace_async async def query_by_factory( self, resource_group_name: str, @@ -191,32 +180,22 @@ async def query_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = 
self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(filter_parameters, 'RunFilterParameters') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_query_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.query_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -230,4 +209,6 @@ async def query_by_factory( return cls(pipeline_response, deserialized, {}) return deserialized + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py index 1a2c6cc2a22f..5b9e92927dc9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py @@ -5,19 +5,24 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models - +from ..._vendor import _convert_request +from ...operations._triggers_operations import build_create_or_update_request, build_delete_request, build_get_event_subscription_status_request, build_get_request, build_list_by_factory_request, build_query_by_factory_request, build_start_request_initial, build_stop_request_initial, build_subscribe_to_events_request_initial, build_unsubscribe_from_events_request_initial T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -43,6 +48,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + @distributed_trace def list_by_factory( self, resource_group_name: str, @@ -57,7 +63,8 @@ def list_by_factory( :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either TriggerListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.TriggerListResponse] + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.TriggerListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.TriggerListResponse"] @@ -65,36 +72,33 @@ def list_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", 
self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('TriggerListResponse', pipeline_response) + deserialized = self._deserialize("TriggerListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -112,11 +116,13 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged( get_next, extract_data ) list_by_factory.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} # type: ignore + @distributed_trace_async async def query_by_factory( self, resource_group_name: str, @@ -142,32 +148,22 @@ async def query_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(filter_parameters, 'TriggerFilterParameters') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_query_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.query_by_factory.metadata['url'], + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -181,8 +177,11 @@ async def query_by_factory( return cls(pipeline_response, deserialized, {}) return deserialized + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'} # type: ignore + + @distributed_trace_async async def create_or_update( self, resource_group_name: str, @@ -215,35 +214,24 @@ async def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] 
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(trigger, 'TriggerResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(trigger, 'TriggerResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -257,8 +245,11 @@ async def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore + + @distributed_trace_async async def get( self, resource_group_name: str, @@ -288,30 +279,19 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - 
accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -327,8 +307,11 @@ async def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore + + @distributed_trace_async async def delete( self, resource_group_name: str, @@ -354,28 +337,18 @@ async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, 
**kwargs) response = pipeline_response.http_response @@ -388,6 +361,7 @@ async def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore + async def _subscribe_to_events_initial( self, resource_group_name: str, @@ -400,28 +374,18 @@ async def _subscribe_to_events_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self._subscribe_to_events_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_subscribe_to_events_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + template_url=self._subscribe_to_events_initial.metadata['url'], + ) + request = _convert_request(request) + 
request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -437,8 +401,11 @@ async def _subscribe_to_events_initial( return cls(pipeline_response, deserialized, {}) return deserialized + _subscribe_to_events_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore + + @distributed_trace_async async def begin_subscribe_to_events( self, resource_group_name: str, @@ -456,15 +423,19 @@ async def begin_subscribe_to_events( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus + or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.TriggerSubscriptionOperationStatus"] lro_delay = kwargs.pop( 'polling_interval', @@ -479,25 +450,17 @@ async def begin_subscribe_to_events( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, 
pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -509,8 +472,10 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_subscribe_to_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore + @distributed_trace_async async def get_event_subscription_status( self, resource_group_name: str, @@ -536,28 +501,18 @@ async def get_event_subscription_status( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get_event_subscription_status.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", 
api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_event_subscription_status_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + template_url=self.get_event_subscription_status.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -571,8 +526,10 @@ async def get_event_subscription_status( return cls(pipeline_response, deserialized, {}) return deserialized + get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} # type: ignore + async def _unsubscribe_from_events_initial( self, resource_group_name: str, @@ -585,28 +542,18 @@ async def _unsubscribe_from_events_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self._unsubscribe_from_events_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 
'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_unsubscribe_from_events_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + template_url=self._unsubscribe_from_events_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -622,8 +569,11 @@ async def _unsubscribe_from_events_initial( return cls(pipeline_response, deserialized, {}) return deserialized + _unsubscribe_from_events_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore + + @distributed_trace_async async def begin_unsubscribe_from_events( self, resource_group_name: str, @@ -641,15 +591,19 @@ async def begin_unsubscribe_from_events( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. 
- Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus + or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.TriggerSubscriptionOperationStatus"] lro_delay = kwargs.pop( 'polling_interval', @@ -664,25 +618,17 @@ async def begin_unsubscribe_from_events( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - 
path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -694,6 +640,7 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_unsubscribe_from_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore async def _start_initial( @@ -708,28 +655,18 @@ async def _start_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self._start_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 
'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_start_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + template_url=self._start_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -742,6 +679,8 @@ async def _start_initial( _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore + + @distributed_trace_async async def begin_start( self, resource_group_name: str, @@ -759,15 +698,17 @@ async def begin_start( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. 
+ :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', @@ -782,22 +723,14 @@ async def begin_start( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - if polling is True: polling_method = AsyncARMPolling(lro_delay, 
path_format_arguments=path_format_arguments, **kwargs) + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -809,6 +742,7 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore async def _stop_initial( @@ -823,28 +757,18 @@ async def _stop_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self._stop_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_stop_request_initial( + 
subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + template_url=self._stop_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -857,6 +781,8 @@ async def _stop_initial( _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore + + @distributed_trace_async async def begin_stop( self, resource_group_name: str, @@ -874,15 +800,17 @@ async def begin_stop( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', @@ -897,22 +825,14 @@ async def begin_stop( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -924,4 +844,5 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_stop.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py index 158bf6b35e73..c52584bd31a1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py @@ -6,33 +6,18 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from enum import Enum, EnumMeta +from enum import Enum from six import with_metaclass +from azure.core import CaseInsensitiveEnumMeta -class _CaseInsensitiveEnumMeta(EnumMeta): - def __getitem__(self, name): - return super().__getitem__(name.upper()) - def __getattr__(cls, name): - """Return the enum member matching `name` - We use __getattr__ instead of descriptors or inserting into the enum - class' __dict__ in order to support `name` and `value` being both - properties for enum members (which live in the class' __dict__) and - enum members themselves. 
- """ - try: - return cls._member_map_[name.upper()] - except KeyError: - raise AttributeError(name) - - -class AmazonRdsForOraclePartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class AmazonRdsForOraclePartitionOption(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NONE = "None" PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" DYNAMIC_RANGE = "DynamicRange" -class AvroCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class AvroCompressionCodec(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NONE = "none" DEFLATE = "deflate" @@ -40,7 +25,7 @@ class AvroCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): XZ = "xz" BZIP2 = "bzip2" -class AzureFunctionActivityMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class AzureFunctionActivityMethod(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The list of HTTP methods supported by a AzureFunctionActivity. """ @@ -52,19 +37,19 @@ class AzureFunctionActivityMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, HEAD = "HEAD" TRACE = "TRACE" -class AzureSearchIndexWriteBehaviorType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class AzureSearchIndexWriteBehaviorType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Specify the write behavior when upserting documents into Azure Search Index. 
""" MERGE = "Merge" UPLOAD = "Upload" -class BlobEventTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class BlobEventTypes(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): MICROSOFT_STORAGE_BLOB_CREATED = "Microsoft.Storage.BlobCreated" MICROSOFT_STORAGE_BLOB_DELETED = "Microsoft.Storage.BlobDeleted" -class CassandraSourceReadConsistencyLevels(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class CassandraSourceReadConsistencyLevels(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The consistency level specifies how many Cassandra servers must respond to a read request before returning data to the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of @@ -82,7 +67,7 @@ class CassandraSourceReadConsistencyLevels(with_metaclass(_CaseInsensitiveEnumMe SERIAL = "SERIAL" LOCAL_SERIAL = "LOCAL_SERIAL" -class CompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class CompressionCodec(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """All available compressionCodec values. """ @@ -97,7 +82,7 @@ class CompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): TAR = "tar" TAR_G_ZIP = "tarGZip" -class CopyBehaviorType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class CopyBehaviorType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """All available types of copy behavior. """ @@ -105,7 +90,7 @@ class CopyBehaviorType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): FLATTEN_HIERARCHY = "FlattenHierarchy" MERGE_FILES = "MergeFiles" -class CosmosDbConnectionMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class CosmosDbConnectionMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The connection mode used to access CosmosDB account. Type: string (or Expression with resultType string). 
""" @@ -113,7 +98,7 @@ class CosmosDbConnectionMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum) GATEWAY = "Gateway" DIRECT = "Direct" -class CosmosDbServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class CosmosDbServicePrincipalCredentialType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). @@ -122,7 +107,7 @@ class CosmosDbServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnum SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" -class DataFlowComputeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class DataFlowComputeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Compute type of the cluster which will execute data flow job. """ @@ -130,7 +115,7 @@ class DataFlowComputeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): MEMORY_OPTIMIZED = "MemoryOptimized" COMPUTE_OPTIMIZED = "ComputeOptimized" -class DataFlowDebugCommandType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class DataFlowDebugCommandType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The command type. """ @@ -138,14 +123,14 @@ class DataFlowDebugCommandType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enu EXECUTE_STATISTICS_QUERY = "executeStatisticsQuery" EXECUTE_EXPRESSION_QUERY = "executeExpressionQuery" -class DatasetCompressionLevel(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class DatasetCompressionLevel(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """All available compression levels. """ OPTIMAL = "Optimal" FASTEST = "Fastest" -class DayOfWeek(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class DayOfWeek(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The days of the week. 
""" @@ -157,7 +142,7 @@ class DayOfWeek(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): FRIDAY = "Friday" SATURDAY = "Saturday" -class DaysOfWeek(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class DaysOfWeek(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): SUNDAY = "Sunday" MONDAY = "Monday" @@ -167,21 +152,21 @@ class DaysOfWeek(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): FRIDAY = "Friday" SATURDAY = "Saturday" -class Db2AuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class Db2AuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """AuthenticationType to be used for connection. It is mutually exclusive with connectionString property. """ BASIC = "Basic" -class DependencyCondition(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class DependencyCondition(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): SUCCEEDED = "Succeeded" FAILED = "Failed" SKIPPED = "Skipped" COMPLETED = "Completed" -class DynamicsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class DynamicsAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """All available dynamicsAuthenticationType values. """ @@ -189,20 +174,20 @@ class DynamicsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, E IFD = "Ifd" AAD_SERVICE_PRINCIPAL = "AADServicePrincipal" -class DynamicsDeploymentType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class DynamicsDeploymentType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """All available dynamicsDeploymentType values. """ ONLINE = "Online" ON_PREMISES_WITH_IFD = "OnPremisesWithIfd" -class DynamicsSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class DynamicsSinkWriteBehavior(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Defines values for DynamicsSinkWriteBehavior. 
""" UPSERT = "Upsert" -class EventSubscriptionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class EventSubscriptionStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Event Subscription Status. """ @@ -212,7 +197,7 @@ class EventSubscriptionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum DISABLED = "Disabled" UNKNOWN = "Unknown" -class FactoryIdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class FactoryIdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The identity type. """ @@ -220,14 +205,14 @@ class FactoryIdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): USER_ASSIGNED = "UserAssigned" SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned" -class FtpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class FtpAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the FTP server. """ BASIC = "Basic" ANONYMOUS = "Anonymous" -class GlobalParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class GlobalParameterType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Global Parameter type. """ @@ -238,7 +223,7 @@ class GlobalParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): BOOL = "Bool" ARRAY = "Array" -class GoogleAdWordsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class GoogleAdWordsAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. 
""" @@ -246,7 +231,7 @@ class GoogleAdWordsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, s SERVICE_AUTHENTICATION = "ServiceAuthentication" USER_AUTHENTICATION = "UserAuthentication" -class GoogleBigQueryAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class GoogleBigQueryAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. """ @@ -254,14 +239,14 @@ class GoogleBigQueryAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, SERVICE_AUTHENTICATION = "ServiceAuthentication" USER_AUTHENTICATION = "UserAuthentication" -class HBaseAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class HBaseAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The authentication mechanism to use to connect to the HBase server. """ ANONYMOUS = "Anonymous" BASIC = "Basic" -class HdiNodeTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class HdiNodeTypes(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """All available HdiNodeTypes values. """ @@ -269,7 +254,7 @@ class HdiNodeTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): WORKERNODE = "Workernode" ZOOKEEPER = "Zookeeper" -class HDInsightActivityDebugInfoOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class HDInsightActivityDebugInfoOption(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The HDInsightActivityDebugInfoOption settings to use. """ @@ -277,7 +262,7 @@ class HDInsightActivityDebugInfoOption(with_metaclass(_CaseInsensitiveEnumMeta, ALWAYS = "Always" FAILURE = "Failure" -class HiveAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class HiveAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The authentication method used to access the Hive server. 
""" @@ -286,7 +271,7 @@ class HiveAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum) USERNAME_AND_PASSWORD = "UsernameAndPassword" WINDOWS_AZURE_HD_INSIGHT_SERVICE = "WindowsAzureHDInsightService" -class HiveServerType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class HiveServerType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The type of Hive server. """ @@ -294,7 +279,7 @@ class HiveServerType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): HIVE_SERVER2 = "HiveServer2" HIVE_THRIFT_SERVER = "HiveThriftServer" -class HiveThriftTransportProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class HiveThriftTransportProtocol(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The transport protocol to use in the Thrift layer. """ @@ -302,7 +287,7 @@ class HiveThriftTransportProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, SASL = "SASL" HTTP = "HTTP " -class HttpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class HttpAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the HTTP server. """ @@ -312,7 +297,7 @@ class HttpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum) WINDOWS = "Windows" CLIENT_CERTIFICATE = "ClientCertificate" -class ImpalaAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class ImpalaAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to use. """ @@ -320,35 +305,35 @@ class ImpalaAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enu SASL_USERNAME = "SASLUsername" USERNAME_AND_PASSWORD = "UsernameAndPassword" -class IntegrationRuntimeAuthKeyName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class IntegrationRuntimeAuthKeyName(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The name of the authentication key to regenerate. 
""" AUTH_KEY1 = "authKey1" AUTH_KEY2 = "authKey2" -class IntegrationRuntimeAutoUpdate(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class IntegrationRuntimeAutoUpdate(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The state of integration runtime auto update. """ ON = "On" OFF = "Off" -class IntegrationRuntimeEdition(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class IntegrationRuntimeEdition(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The edition for the SSIS Integration Runtime """ STANDARD = "Standard" ENTERPRISE = "Enterprise" -class IntegrationRuntimeEntityReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class IntegrationRuntimeEntityReferenceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The type of this referenced entity. """ INTEGRATION_RUNTIME_REFERENCE = "IntegrationRuntimeReference" LINKED_SERVICE_REFERENCE = "LinkedServiceReference" -class IntegrationRuntimeInternalChannelEncryptionMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class IntegrationRuntimeInternalChannelEncryptionMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """It is used to set the encryption mode for node-node communication channel (when more than 2 self-hosted integration runtime nodes exist). """ @@ -357,14 +342,14 @@ class IntegrationRuntimeInternalChannelEncryptionMode(with_metaclass(_CaseInsens SSL_ENCRYPTED = "SslEncrypted" NOT_ENCRYPTED = "NotEncrypted" -class IntegrationRuntimeLicenseType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class IntegrationRuntimeLicenseType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """License type for bringing your own license scenario. 
""" BASE_PRICE = "BasePrice" LICENSE_INCLUDED = "LicenseIncluded" -class IntegrationRuntimeSsisCatalogPricingTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class IntegrationRuntimeSsisCatalogPricingTier(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The pricing tier for the catalog database. The valid values could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/ """ @@ -374,7 +359,7 @@ class IntegrationRuntimeSsisCatalogPricingTier(with_metaclass(_CaseInsensitiveEn PREMIUM = "Premium" PREMIUM_RS = "PremiumRS" -class IntegrationRuntimeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class IntegrationRuntimeState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The state of integration runtime. """ @@ -389,14 +374,14 @@ class IntegrationRuntimeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum OFFLINE = "Offline" ACCESS_DENIED = "AccessDenied" -class IntegrationRuntimeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class IntegrationRuntimeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The type of integration runtime. """ MANAGED = "Managed" SELF_HOSTED = "SelfHosted" -class IntegrationRuntimeUpdateResult(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class IntegrationRuntimeUpdateResult(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The result of the last integration runtime node update. """ @@ -404,21 +389,21 @@ class IntegrationRuntimeUpdateResult(with_metaclass(_CaseInsensitiveEnumMeta, st SUCCEED = "Succeed" FAIL = "Fail" -class JsonFormatFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class JsonFormatFilePattern(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """JSON format file pattern. A property of JsonFormat. 
""" SET_OF_OBJECTS = "setOfObjects" ARRAY_OF_OBJECTS = "arrayOfObjects" -class JsonWriteFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class JsonWriteFilePattern(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """All available filePatterns. """ SET_OF_OBJECTS = "setOfObjects" ARRAY_OF_OBJECTS = "arrayOfObjects" -class ManagedIntegrationRuntimeNodeStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class ManagedIntegrationRuntimeNodeStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The managed integration runtime node status. """ @@ -427,14 +412,14 @@ class ManagedIntegrationRuntimeNodeStatus(with_metaclass(_CaseInsensitiveEnumMet RECYCLING = "Recycling" UNAVAILABLE = "Unavailable" -class MongoDbAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class MongoDbAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the MongoDB database. """ BASIC = "Basic" ANONYMOUS = "Anonymous" -class NetezzaPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class NetezzaPartitionOption(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The partition mechanism that will be used for Netezza read in parallel. """ @@ -442,14 +427,14 @@ class NetezzaPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum) DATA_SLICE = "DataSlice" DYNAMIC_RANGE = "DynamicRange" -class ODataAadServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class ODataAadServicePrincipalCredentialType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Specify the credential type (key or cert) is used for service principal. 
""" SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" -class ODataAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class ODataAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Type of authentication used to connect to the OData service. """ @@ -459,7 +444,7 @@ class ODataAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum AAD_SERVICE_PRINCIPAL = "AadServicePrincipal" MANAGED_SERVICE_IDENTITY = "ManagedServiceIdentity" -class OraclePartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class OraclePartitionOption(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The partition mechanism that will be used for Oracle read in parallel. """ @@ -467,14 +452,14 @@ class OraclePartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)) PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" DYNAMIC_RANGE = "DynamicRange" -class OrcCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class OrcCompressionCodec(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): NONE = "none" ZLIB = "zlib" SNAPPY = "snappy" LZO = "lzo" -class ParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class ParameterType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Parameter type. """ @@ -486,7 +471,7 @@ class ParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): ARRAY = "Array" SECURE_STRING = "SecureString" -class PhoenixAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class PhoenixAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The authentication mechanism used to connect to the Phoenix server. 
""" @@ -494,28 +479,28 @@ class PhoenixAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, En USERNAME_AND_PASSWORD = "UsernameAndPassword" WINDOWS_AZURE_HD_INSIGHT_SERVICE = "WindowsAzureHDInsightService" -class PolybaseSettingsRejectType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class PolybaseSettingsRejectType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Indicates whether the RejectValue property is specified as a literal value or a percentage. """ VALUE = "value" PERCENTAGE = "percentage" -class PrestoAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class PrestoAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The authentication mechanism used to connect to the Presto server. """ ANONYMOUS = "Anonymous" LDAP = "LDAP" -class PublicNetworkAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class PublicNetworkAccess(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Whether or not public network access is allowed for the data factory. """ ENABLED = "Enabled" DISABLED = "Disabled" -class RecurrenceFrequency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class RecurrenceFrequency(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Enumerates possible frequency option for the schedule trigger. """ @@ -527,7 +512,7 @@ class RecurrenceFrequency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): MONTH = "Month" YEAR = "Year" -class RestServiceAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class RestServiceAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Type of authentication used to connect to the REST service. 
""" @@ -536,7 +521,7 @@ class RestServiceAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str AAD_SERVICE_PRINCIPAL = "AadServicePrincipal" MANAGED_SERVICE_IDENTITY = "ManagedServiceIdentity" -class RunQueryFilterOperand(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class RunQueryFilterOperand(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Parameter name to be used for filter. The allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger runs are @@ -556,7 +541,7 @@ class RunQueryFilterOperand(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)) RUN_GROUP_ID = "RunGroupId" LATEST_ONLY = "LatestOnly" -class RunQueryFilterOperator(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class RunQueryFilterOperator(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Operator to be used for filter. """ @@ -565,14 +550,14 @@ class RunQueryFilterOperator(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum) IN_ENUM = "In" NOT_IN = "NotIn" -class RunQueryOrder(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class RunQueryOrder(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Sorting order of the parameter. """ ASC = "ASC" DESC = "DESC" -class RunQueryOrderByField(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class RunQueryOrderByField(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Parameter name to be used for order by. 
The allowed parameters to order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, @@ -589,35 +574,35 @@ class RunQueryOrderByField(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): TRIGGER_NAME = "TriggerName" TRIGGER_RUN_TIMESTAMP = "TriggerRunTimestamp" -class SalesforceSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SalesforceSinkWriteBehavior(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The write behavior for the operation. Default is Insert. """ INSERT = "Insert" UPSERT = "Upsert" -class SalesforceSourceReadBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SalesforceSourceReadBehavior(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The read behavior for the operation. Default is Query. """ QUERY = "Query" QUERY_ALL = "QueryAll" -class SapCloudForCustomerSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SapCloudForCustomerSinkWriteBehavior(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The write behavior for the operation. Default is 'Insert'. """ INSERT = "Insert" UPDATE = "Update" -class SapHanaAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SapHanaAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the SAP HANA server. """ BASIC = "Basic" WINDOWS = "Windows" -class SapHanaPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SapHanaPartitionOption(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The partition mechanism that will be used for SAP HANA read in parallel. 
""" @@ -625,7 +610,7 @@ class SapHanaPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum) PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" SAP_HANA_DYNAMIC_RANGE = "SapHanaDynamicRange" -class SapTablePartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SapTablePartitionOption(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The partition mechanism that will be used for SAP table read in parallel. """ @@ -636,7 +621,7 @@ class SapTablePartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum PARTITION_ON_CALENDAR_DATE = "PartitionOnCalendarDate" PARTITION_ON_TIME = "PartitionOnTime" -class SelfHostedIntegrationRuntimeNodeStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SelfHostedIntegrationRuntimeNodeStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Status of the integration runtime node. """ @@ -648,21 +633,21 @@ class SelfHostedIntegrationRuntimeNodeStatus(with_metaclass(_CaseInsensitiveEnum INITIALIZING = "Initializing" INITIALIZE_FAILED = "InitializeFailed" -class ServiceNowAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class ServiceNowAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to use. """ BASIC = "Basic" O_AUTH2 = "OAuth2" -class ServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class ServicePrincipalCredentialType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """All available servicePrincipalCredentialType values. """ SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" -class SftpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SftpAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the FTP server. 
""" @@ -670,7 +655,7 @@ class SftpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum) SSH_PUBLIC_KEY = "SshPublicKey" MULTI_FACTOR = "MultiFactor" -class SparkAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SparkAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The authentication method used to access the Spark server. """ @@ -679,7 +664,7 @@ class SparkAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum USERNAME_AND_PASSWORD = "UsernameAndPassword" WINDOWS_AZURE_HD_INSIGHT_SERVICE = "WindowsAzureHDInsightService" -class SparkServerType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SparkServerType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The type of Spark server. """ @@ -687,7 +672,7 @@ class SparkServerType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): SHARK_SERVER2 = "SharkServer2" SPARK_THRIFT_SERVER = "SparkThriftServer" -class SparkThriftTransportProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SparkThriftTransportProtocol(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The transport protocol to use in the Thrift layer. """ @@ -695,7 +680,7 @@ class SparkThriftTransportProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, SASL = "SASL" HTTP = "HTTP " -class SqlAlwaysEncryptedAkvAuthType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SqlAlwaysEncryptedAkvAuthType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Sql always encrypted AKV authentication type. Type: string (or Expression with resultType string). 
""" @@ -704,14 +689,14 @@ class SqlAlwaysEncryptedAkvAuthType(with_metaclass(_CaseInsensitiveEnumMeta, str MANAGED_IDENTITY = "ManagedIdentity" USER_ASSIGNED_MANAGED_IDENTITY = "UserAssignedManagedIdentity" -class SqlDWWriteBehaviorEnum(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SqlDWWriteBehaviorEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Specify the write behavior when copying data into sql dw. """ INSERT = "Insert" UPSERT = "Upsert" -class SqlPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SqlPartitionOption(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The partition mechanism that will be used for Sql read in parallel. """ @@ -719,7 +704,7 @@ class SqlPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" DYNAMIC_RANGE = "DynamicRange" -class SqlWriteBehaviorEnum(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SqlWriteBehaviorEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Specify the write behavior when copying data into sql. """ @@ -727,13 +712,13 @@ class SqlWriteBehaviorEnum(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): UPSERT = "Upsert" STORED_PROCEDURE = "StoredProcedure" -class SsisLogLocationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SsisLogLocationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The type of SSIS log location. """ FILE = "File" -class SsisObjectMetadataType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SsisObjectMetadataType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The type of SSIS object metadata. 
""" @@ -742,7 +727,7 @@ class SsisObjectMetadataType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum) PACKAGE = "Package" ENVIRONMENT = "Environment" -class SsisPackageLocationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SsisPackageLocationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The type of SSIS package location. """ @@ -751,7 +736,7 @@ class SsisPackageLocationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum INLINE_PACKAGE = "InlinePackage" PACKAGE_STORE = "PackageStore" -class StoredProcedureParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class StoredProcedureParameterType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Stored procedure parameter type. """ @@ -763,21 +748,21 @@ class StoredProcedureParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, BOOLEAN = "Boolean" DATE = "Date" -class SybaseAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SybaseAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """AuthenticationType to be used for connection. """ BASIC = "Basic" WINDOWS = "Windows" -class TeradataAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class TeradataAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """AuthenticationType to be used for connection. """ BASIC = "Basic" WINDOWS = "Windows" -class TeradataPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class TeradataPartitionOption(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The partition mechanism that will be used for teradata read in parallel. """ @@ -785,7 +770,7 @@ class TeradataPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum HASH = "Hash" DYNAMIC_RANGE = "DynamicRange" -class TriggerRunStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class TriggerRunStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Trigger run status. 
""" @@ -793,7 +778,7 @@ class TriggerRunStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): FAILED = "Failed" INPROGRESS = "Inprogress" -class TriggerRuntimeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class TriggerRuntimeState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Enumerates possible state of Triggers. """ @@ -801,7 +786,7 @@ class TriggerRuntimeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): STOPPED = "Stopped" DISABLED = "Disabled" -class TumblingWindowFrequency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class TumblingWindowFrequency(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Enumerates possible frequency option for the tumbling window trigger. """ @@ -809,7 +794,7 @@ class TumblingWindowFrequency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum HOUR = "Hour" MONTH = "Month" -class VariableType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class VariableType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Variable type. """ @@ -817,7 +802,7 @@ class VariableType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): BOOL = "Bool" ARRAY = "Array" -class WebActivityMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class WebActivityMethod(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The list of HTTP methods supported by a WebActivity. """ @@ -826,7 +811,7 @@ class WebActivityMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): PUT = "PUT" DELETE = "DELETE" -class WebAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class WebAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Type of authentication used to connect to the web table source. 
""" @@ -834,7 +819,7 @@ class WebAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)) ANONYMOUS = "Anonymous" CLIENT_CERTIFICATE = "ClientCertificate" -class WebHookActivityMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class WebHookActivityMethod(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """The list of HTTP methods supported by a WebHook activity. """ diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index d5a79892a6bf..bf6c1e62d97e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -12,12 +12,12 @@ class AccessPolicyResponse(msrest.serialization.Model): """Get Data Plane read only token response definition. - :param policy: The user access policy. - :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy - :param access_token: Data Plane read only access token. - :type access_token: str - :param data_plane_url: Data Plane service base URL. - :type data_plane_url: str + :ivar policy: The user access policy. + :vartype policy: ~azure.mgmt.datafactory.models.UserAccessPolicy + :ivar access_token: Data Plane read only access token. + :vartype access_token: str + :ivar data_plane_url: Data Plane service base URL. + :vartype data_plane_url: str """ _attribute_map = { @@ -30,6 +30,14 @@ def __init__( self, **kwargs ): + """ + :keyword policy: The user access policy. + :paramtype policy: ~azure.mgmt.datafactory.models.UserAccessPolicy + :keyword access_token: Data Plane read only access token. + :paramtype access_token: str + :keyword data_plane_url: Data Plane service base URL. 
+ :paramtype data_plane_url: str + """ super(AccessPolicyResponse, self).__init__(**kwargs) self.policy = kwargs.get('policy', None) self.access_token = kwargs.get('access_token', None) @@ -44,19 +52,19 @@ class Activity(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] """ _validation = { @@ -81,6 +89,19 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. 
+ :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + """ super(Activity, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.name = kwargs['name'] @@ -95,13 +116,13 @@ class ActivityDependency(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param activity: Required. Activity name. - :type activity: str - :param dependency_conditions: Required. Match-Condition for the dependency. - :type dependency_conditions: list[str or ~azure.mgmt.datafactory.models.DependencyCondition] + :vartype additional_properties: dict[str, any] + :ivar activity: Required. Activity name. + :vartype activity: str + :ivar dependency_conditions: Required. Match-Condition for the dependency. + :vartype dependency_conditions: list[str or ~azure.mgmt.datafactory.models.DependencyCondition] """ _validation = { @@ -119,6 +140,16 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword activity: Required. Activity name. + :paramtype activity: str + :keyword dependency_conditions: Required. Match-Condition for the dependency. 
+ :paramtype dependency_conditions: list[str or + ~azure.mgmt.datafactory.models.DependencyCondition] + """ super(ActivityDependency, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.activity = kwargs['activity'] @@ -128,25 +159,25 @@ def __init__( class ActivityPolicy(msrest.serialization.Model): """Execution policy for an activity. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param timeout: Specifies the timeout for the activity to run. The default timeout is 7 days. + :vartype additional_properties: dict[str, any] + :ivar timeout: Specifies the timeout for the activity to run. The default timeout is 7 days. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: any - :param retry: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with + :vartype timeout: any + :ivar retry: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. - :type retry: any - :param retry_interval_in_seconds: Interval between each retry attempt (in seconds). The default + :vartype retry: any + :ivar retry_interval_in_seconds: Interval between each retry attempt (in seconds). The default is 30 sec. - :type retry_interval_in_seconds: int - :param secure_input: When set to true, Input from activity is considered as secure and will not + :vartype retry_interval_in_seconds: int + :ivar secure_input: When set to true, Input from activity is considered as secure and will not be logged to monitoring. 
- :type secure_input: bool - :param secure_output: When set to true, Output from activity is considered as secure and will + :vartype secure_input: bool + :ivar secure_output: When set to true, Output from activity is considered as secure and will not be logged to monitoring. - :type secure_output: bool + :vartype secure_output: bool """ _validation = { @@ -166,6 +197,27 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword timeout: Specifies the timeout for the activity to run. The default timeout is 7 days. + Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype timeout: any + :keyword retry: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression + with resultType integer), minimum: 0. + :paramtype retry: any + :keyword retry_interval_in_seconds: Interval between each retry attempt (in seconds). The + default is 30 sec. + :paramtype retry_interval_in_seconds: int + :keyword secure_input: When set to true, Input from activity is considered as secure and will + not be logged to monitoring. + :paramtype secure_input: bool + :keyword secure_output: When set to true, Output from activity is considered as secure and will + not be logged to monitoring. + :paramtype secure_output: bool + """ super(ActivityPolicy, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.timeout = kwargs.get('timeout', None) @@ -180,9 +232,9 @@ class ActivityRun(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar pipeline_name: The name of the pipeline. :vartype pipeline_name: str :ivar pipeline_run_id: The id of the pipeline run. @@ -248,6 +300,11 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(ActivityRun, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.pipeline_name = None @@ -270,11 +327,11 @@ class ActivityRunsQueryResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of activity runs. - :type value: list[~azure.mgmt.datafactory.models.ActivityRun] - :param continuation_token: The continuation token for getting the next page of results, if any + :ivar value: Required. List of activity runs. + :vartype value: list[~azure.mgmt.datafactory.models.ActivityRun] + :ivar continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. - :type continuation_token: str + :vartype continuation_token: str """ _validation = { @@ -290,6 +347,13 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. List of activity runs. + :paramtype value: list[~azure.mgmt.datafactory.models.ActivityRun] + :keyword continuation_token: The continuation token for getting the next page of results, if + any remaining results exist, null otherwise. + :paramtype continuation_token: str + """ super(ActivityRunsQueryResponse, self).__init__(**kwargs) self.value = kwargs['value'] self.continuation_token = kwargs.get('continuation_token', None) @@ -298,8 +362,8 @@ def __init__( class AddDataFlowToDebugSessionResponse(msrest.serialization.Model): """Response body structure for starting data flow debug session. 
- :param job_version: The ID of data flow debug job version. - :type job_version: str + :ivar job_version: The ID of data flow debug job version. + :vartype job_version: str """ _attribute_map = { @@ -310,6 +374,10 @@ def __init__( self, **kwargs ): + """ + :keyword job_version: The ID of data flow debug job version. + :paramtype job_version: str + """ super(AddDataFlowToDebugSessionResponse, self).__init__(**kwargs) self.job_version = kwargs.get('job_version', None) @@ -317,10 +385,10 @@ def __init__( class AdditionalColumns(msrest.serialization.Model): """Specify the column name and value of additional columns. - :param name: Additional column name. Type: string (or Expression with resultType string). - :type name: any - :param value: Additional column value. Type: string (or Expression with resultType string). - :type value: any + :ivar name: Additional column name. Type: string (or Expression with resultType string). + :vartype name: any + :ivar value: Additional column value. Type: string (or Expression with resultType string). + :vartype value: any """ _attribute_map = { @@ -332,6 +400,12 @@ def __init__( self, **kwargs ): + """ + :keyword name: Additional column name. Type: string (or Expression with resultType string). + :paramtype name: any + :keyword value: Additional column value. Type: string (or Expression with resultType string). + :paramtype value: any + """ super(AdditionalColumns, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.value = kwargs.get('value', None) @@ -345,19 +419,19 @@ class LinkedService(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. 
- :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] """ _validation = { @@ -381,6 +455,19 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. 
+ :paramtype annotations: list[any] + """ super(LinkedService, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = 'LinkedService' # type: str @@ -395,48 +482,47 @@ class AmazonMWSLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. - mws.amazonservices.com). - :type endpoint: any - :param marketplace_id: Required. The Amazon Marketplace ID you want to retrieve data from. To + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. 
+ :vartype annotations: list[any] + :ivar endpoint: Required. The endpoint of the Amazon MWS server, (i.e. mws.amazonservices.com). + :vartype endpoint: any + :ivar marketplace_id: Required. The Amazon Marketplace ID you want to retrieve data from. To retrieve data from multiple Marketplace IDs, separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2). - :type marketplace_id: any - :param seller_id: Required. The Amazon seller ID. - :type seller_id: any - :param mws_auth_token: The Amazon MWS authentication token. - :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_key_id: Required. The access key id used to access data. - :type access_key_id: any - :param secret_key: The secret key used to access data. - :type secret_key: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype marketplace_id: any + :ivar seller_id: Required. The Amazon seller ID. + :vartype seller_id: any + :ivar mws_auth_token: The Amazon MWS authentication token. + :vartype mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar access_key_id: Required. The access key id used to access data. + :vartype access_key_id: any + :ivar secret_key: The secret key used to access data. + :vartype secret_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
- :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -470,6 +556,48 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword endpoint: Required. The endpoint of the Amazon MWS server, (i.e. + mws.amazonservices.com). + :paramtype endpoint: any + :keyword marketplace_id: Required. The Amazon Marketplace ID you want to retrieve data from. To + retrieve data from multiple Marketplace IDs, separate them with a comma (,). (i.e. + A2EUQ1WTGCTBG2). + :paramtype marketplace_id: any + :keyword seller_id: Required. The Amazon seller ID. 
+ :paramtype seller_id: any + :keyword mws_auth_token: The Amazon MWS authentication token. + :paramtype mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword access_key_id: Required. The access key id used to access data. + :paramtype access_key_id: any + :keyword secret_key: The secret key used to access data. + :paramtype secret_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AmazonMWSLinkedService, self).__init__(**kwargs) self.type = 'AmazonMWS' # type: str self.endpoint = kwargs['endpoint'] @@ -492,28 +620,28 @@ class Dataset(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder """ _validation = { @@ -541,6 +669,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + """ super(Dataset, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = 'Dataset' # type: str @@ -558,30 +708,30 @@ class AmazonMWSObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. 
Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -606,6 +756,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). 
+ :paramtype table_name: any + """ super(AmazonMWSObjectDataset, self).__init__(**kwargs) self.type = 'AmazonMWSObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -619,23 +793,23 @@ class CopySource(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any + :vartype disable_metrics_collection: any """ _validation = { @@ -659,6 +833,23 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + """ super(CopySource, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = 'CopySource' # type: str @@ -676,29 +867,29 @@ class TabularSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. 
Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -724,6 +915,29 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(TabularSource, self).__init__(**kwargs) self.type = 'TabularSource' # type: str self.query_timeout = kwargs.get('query_timeout', None) @@ -735,32 +949,32 @@ class AmazonMWSSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. 
Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -783,6 +997,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). 
+ :paramtype query: any + """ super(AmazonMWSSource, self).__init__(**kwargs) self.type = 'AmazonMWSSource' # type: str self.query = kwargs.get('query', None) @@ -793,28 +1033,28 @@ class AmazonRdsForOracleLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
- :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -838,6 +1078,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. 
Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AmazonRdsForOracleLinkedService, self).__init__(**kwargs) self.type = 'AmazonRdsForOracle' # type: str self.connection_string = kwargs['connection_string'] @@ -848,19 +1110,19 @@ def __init__( class AmazonRdsForOraclePartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for AmazonRdsForOracle source partitioning. - :param partition_names: Names of the physical partitions of AmazonRdsForOracle table. - :type partition_names: any - :param partition_column_name: The name of the column in integer type that will be used for + :ivar partition_names: Names of the physical partitions of AmazonRdsForOracle table. + :vartype partition_names: any + :ivar partition_column_name: The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: any - :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + :vartype partition_column_name: any + :ivar partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_upper_bound: any - :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + :vartype partition_upper_bound: any + :ivar partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_lower_bound: any + :vartype partition_lower_bound: any """ _attribute_map = { @@ -874,6 +1136,21 @@ def __init__( self, **kwargs ): + """ + :keyword partition_names: Names of the physical partitions of AmazonRdsForOracle table. 
+ :paramtype partition_names: any + :keyword partition_column_name: The name of the column in integer type that will be used for + proceeding range partitioning. Type: string (or Expression with resultType string). + :paramtype partition_column_name: any + :keyword partition_upper_bound: The maximum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). + :paramtype partition_upper_bound: any + :keyword partition_lower_bound: The minimum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). + :paramtype partition_lower_bound: any + """ super(AmazonRdsForOraclePartitionSettings, self).__init__(**kwargs) self.partition_names = kwargs.get('partition_names', None) self.partition_column_name = kwargs.get('partition_column_name', None) @@ -886,38 +1163,38 @@ class AmazonRdsForOracleSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param oracle_reader_query: AmazonRdsForOracle reader query. Type: string (or Expression with + :vartype disable_metrics_collection: any + :ivar oracle_reader_query: AmazonRdsForOracle reader query. Type: string (or Expression with resultType string). - :type oracle_reader_query: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype oracle_reader_query: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param partition_option: The partition mechanism that will be used for AmazonRdsForOracle read + :vartype query_timeout: any + :ivar partition_option: The partition mechanism that will be used for AmazonRdsForOracle read in parallel. Type: string (or Expression with resultType string). - :type partition_option: any - :param partition_settings: The settings that will be leveraged for AmazonRdsForOracle source + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for AmazonRdsForOracle source partitioning. 
- :type partition_settings: ~azure.mgmt.datafactory.models.AmazonRdsForOraclePartitionSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype partition_settings: ~azure.mgmt.datafactory.models.AmazonRdsForOraclePartitionSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -942,6 +1219,39 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword oracle_reader_query: AmazonRdsForOracle reader query. Type: string (or Expression with + resultType string). + :paramtype oracle_reader_query: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype query_timeout: any + :keyword partition_option: The partition mechanism that will be used for AmazonRdsForOracle + read in parallel. Type: string (or Expression with resultType string). + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for AmazonRdsForOracle source + partitioning. + :paramtype partition_settings: + ~azure.mgmt.datafactory.models.AmazonRdsForOraclePartitionSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(AmazonRdsForOracleSource, self).__init__(**kwargs) self.type = 'AmazonRdsForOracleSource' # type: str self.oracle_reader_query = kwargs.get('oracle_reader_query', None) @@ -956,34 +1266,34 @@ class AmazonRdsForOracleTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param schema_type_properties_schema: The schema name of the AmazonRdsForOracle database. Type: + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar schema_type_properties_schema: The schema name of the AmazonRdsForOracle database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The table name of the AmazonRdsForOracle database. Type: string (or Expression + :vartype schema_type_properties_schema: any + :ivar table: The table name of the AmazonRdsForOracle database. Type: string (or Expression with resultType string). 
- :type table: any + :vartype table: any """ _validation = { @@ -1009,6 +1319,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword schema_type_properties_schema: The schema name of the AmazonRdsForOracle database. + Type: string (or Expression with resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The table name of the AmazonRdsForOracle database. Type: string (or Expression + with resultType string). 
+ :paramtype table: any + """ super(AmazonRdsForOracleTableDataset, self).__init__(**kwargs) self.type = 'AmazonRdsForOracleTable' # type: str self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -1020,33 +1358,33 @@ class AmazonRdsForSqlServerLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. 
Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param user_name: The on-premises Windows authentication user name. Type: string (or Expression + :vartype connection_string: any + :ivar user_name: The on-premises Windows authentication user name. Type: string (or Expression with resultType string). - :type user_name: any - :param password: The on-premises Windows authentication password. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype user_name: any + :ivar password: The on-premises Windows authentication password. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param always_encrypted_settings: Sql always encrypted properties. - :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :vartype encrypted_credential: any + :ivar always_encrypted_settings: Sql always encrypted properties. + :vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -1072,6 +1410,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword user_name: The on-premises Windows authentication user name. Type: string (or + Expression with resultType string). + :paramtype user_name: any + :keyword password: The on-premises Windows authentication password. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword always_encrypted_settings: Sql always encrypted properties. + :paramtype always_encrypted_settings: + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + """ super(AmazonRdsForSqlServerLinkedService, self).__init__(**kwargs) self.type = 'AmazonRdsForSqlServer' # type: str self.connection_string = kwargs['connection_string'] @@ -1086,46 +1452,46 @@ class AmazonRdsForSqlServerSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. 
+ :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
- :type sql_reader_query: any - :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database - source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression - with resultType string). - :type sql_reader_stored_procedure_name: any - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + :vartype additional_columns: any + :ivar sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :vartype sql_reader_query: any + :ivar sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. + This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with + resultType string). + :vartype sql_reader_stored_procedure_name: any + :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: any - :param partition_option: The partition mechanism that will be used for Sql read in parallel. + :ivar produce_additional_types: Which additional types to produce. + :vartype produce_additional_types: any + :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. 
+ :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -1153,6 +1519,47 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword sql_reader_query: SQL reader query. Type: string (or Expression with resultType + string). + :paramtype sql_reader_query: any + :keyword sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :paramtype sql_reader_stored_procedure_name: any + :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. 
+ Example: "{Parameter1: {value: "1", type: "int"}}". + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :keyword produce_additional_types: Which additional types to produce. + :paramtype produce_additional_types: any + :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. + :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + """ super(AmazonRdsForSqlServerSource, self).__init__(**kwargs) self.type = 'AmazonRdsForSqlServerSource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) @@ -1168,34 +1575,34 @@ class AmazonRdsForSqlServerTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The table name of the SQL Server dataset. Type: string (or Expression with + :vartype schema_type_properties_schema: any + :ivar table: The table name of the SQL Server dataset. Type: string (or Expression with resultType string). 
- :type table: any + :vartype table: any """ _validation = { @@ -1221,6 +1628,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string + (or Expression with resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The table name of the SQL Server dataset. Type: string (or Expression with + resultType string). 
+ :paramtype table: any + """ super(AmazonRdsForSqlServerTableDataset, self).__init__(**kwargs) self.type = 'AmazonRdsForSqlServerTable' # type: str self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -1232,37 +1667,37 @@ class AmazonRedshiftLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param server: Required. The name of the Amazon Redshift server. Type: string (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar server: Required. The name of the Amazon Redshift server. 
Type: string (or Expression with resultType string). - :type server: any - :param username: The username of the Amazon Redshift source. Type: string (or Expression with + :vartype server: any + :ivar username: The username of the Amazon Redshift source. Type: string (or Expression with resultType string). - :type username: any - :param password: The password of the Amazon Redshift source. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param database: Required. The database name of the Amazon Redshift source. Type: string (or + :vartype username: any + :ivar password: The password of the Amazon Redshift source. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar database: Required. The database name of the Amazon Redshift source. Type: string (or Expression with resultType string). - :type database: any - :param port: The TCP port number that the Amazon Redshift server uses to listen for client + :vartype database: any + :ivar port: The TCP port number that the Amazon Redshift server uses to listen for client connections. The default value is 5439. Type: integer (or Expression with resultType integer). - :type port: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype port: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -1290,6 +1725,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword server: Required. The name of the Amazon Redshift server. Type: string (or Expression + with resultType string). + :paramtype server: any + :keyword username: The username of the Amazon Redshift source. Type: string (or Expression with + resultType string). + :paramtype username: any + :keyword password: The password of the Amazon Redshift source. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword database: Required. The database name of the Amazon Redshift source. Type: string (or + Expression with resultType string). + :paramtype database: any + :keyword port: The TCP port number that the Amazon Redshift server uses to listen for client + connections. The default value is 5439. Type: integer (or Expression with resultType integer). + :paramtype port: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AmazonRedshiftLinkedService, self).__init__(**kwargs) self.type = 'AmazonRedshift' # type: str self.server = kwargs['server'] @@ -1305,35 +1771,35 @@ class AmazonRedshiftSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. 
Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any - :param redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when + :vartype additional_columns: any + :ivar query: Database query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. - :type redshift_unload_settings: ~azure.mgmt.datafactory.models.RedshiftUnloadSettings + :vartype redshift_unload_settings: ~azure.mgmt.datafactory.models.RedshiftUnloadSettings """ _validation = { @@ -1357,6 +1823,35 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when + copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be + unloaded into S3 first and then copied into the targeted sink from the interim S3. + :paramtype redshift_unload_settings: ~azure.mgmt.datafactory.models.RedshiftUnloadSettings + """ super(AmazonRedshiftSource, self).__init__(**kwargs) self.type = 'AmazonRedshiftSource' # type: str self.query = kwargs.get('query', None) @@ -1368,37 +1863,37 @@ class AmazonRedshiftTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. 
+ :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The Amazon Redshift table name. 
Type: string (or Expression with resultType + :vartype table_name: any + :ivar table: The Amazon Redshift table name. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The Amazon Redshift schema name. Type: string (or + :vartype table: any + :ivar schema_type_properties_schema: The Amazon Redshift schema name. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -1425,6 +1920,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The Amazon Redshift table name. 
Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The Amazon Redshift schema name. Type: string (or + Expression with resultType string). + :paramtype schema_type_properties_schema: any + """ super(AmazonRedshiftTableDataset, self).__init__(**kwargs) self.type = 'AmazonRedshiftTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -1437,37 +1963,37 @@ class AmazonS3CompatibleLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param access_key_id: The access key identifier of the Amazon S3 Compatible Identity and Access + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar access_key_id: The access key identifier of the Amazon S3 Compatible Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: any - :param secret_access_key: The secret access key of the Amazon S3 Compatible Identity and Access + :vartype access_key_id: any + :ivar secret_access_key: The secret access key of the Amazon S3 Compatible Identity and Access Management (IAM) user. - :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_url: This value specifies the endpoint to access with the Amazon S3 Compatible + :vartype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_url: This value specifies the endpoint to access with the Amazon S3 Compatible Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :type service_url: any - :param force_path_style: If true, use S3 path-style access instead of virtual hosted-style + :vartype service_url: any + :ivar force_path_style: If true, use S3 path-style access instead of virtual hosted-style access. Default value is false. Type: boolean (or Expression with resultType boolean). - :type force_path_style: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype force_path_style: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -1492,6 +2018,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword access_key_id: The access key identifier of the Amazon S3 Compatible Identity and + Access Management (IAM) user. Type: string (or Expression with resultType string). + :paramtype access_key_id: any + :keyword secret_access_key: The secret access key of the Amazon S3 Compatible Identity and + Access Management (IAM) user. + :paramtype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_url: This value specifies the endpoint to access with the Amazon S3 Compatible + Connector. This is an optional property; change it only if you want to try a different service + endpoint or want to switch between https and http. Type: string (or Expression with resultType + string). + :paramtype service_url: any + :keyword force_path_style: If true, use S3 path-style access instead of virtual hosted-style + access. Default value is false. Type: boolean (or Expression with resultType boolean). + :paramtype force_path_style: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(AmazonS3CompatibleLinkedService, self).__init__(**kwargs) self.type = 'AmazonS3Compatible' # type: str self.access_key_id = kwargs.get('access_key_id', None) @@ -1509,17 +2066,17 @@ class DatasetLocation(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any + :vartype file_name: any """ _validation = { @@ -1541,6 +2098,17 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). 
+ :paramtype file_name: any + """ super(DatasetLocation, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = 'DatasetLocation' # type: str @@ -1553,23 +2121,23 @@ class AmazonS3CompatibleLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any - :param bucket_name: Specify the bucketName of Amazon S3 Compatible. Type: string (or Expression + :vartype file_name: any + :ivar bucket_name: Specify the bucketName of Amazon S3 Compatible. Type: string (or Expression with resultType string). - :type bucket_name: any - :param version: Specify the version of Amazon S3 Compatible. Type: string (or Expression with + :vartype bucket_name: any + :ivar version: Specify the version of Amazon S3 Compatible. Type: string (or Expression with resultType string). 
- :type version: any + :vartype version: any """ _validation = { @@ -1589,6 +2157,23 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + :keyword bucket_name: Specify the bucketName of Amazon S3 Compatible. Type: string (or + Expression with resultType string). + :paramtype bucket_name: any + :keyword version: Specify the version of Amazon S3 Compatible. Type: string (or Expression with + resultType string). + :paramtype version: any + """ super(AmazonS3CompatibleLocation, self).__init__(**kwargs) self.type = 'AmazonS3CompatibleLocation' # type: str self.bucket_name = kwargs.get('bucket_name', None) @@ -1603,17 +2188,17 @@ class StoreReadSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any + :vartype disable_metrics_collection: any """ _validation = { @@ -1635,6 +2220,17 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + """ super(StoreReadSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = 'StoreReadSettings' # type: str @@ -1647,47 +2243,47 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. 
+ :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: Amazon S3 Compatible wildcardFolderPath. Type: string (or + :vartype recursive: any + :ivar wildcard_folder_path: Amazon S3 Compatible wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: Amazon S3 Compatible wildcardFileName. Type: string (or Expression + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: Amazon S3 Compatible wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param prefix: The prefix filter for the S3 Compatible object name. Type: string (or Expression + :vartype wildcard_file_name: any + :ivar prefix: The prefix filter for the S3 Compatible object name. Type: string (or Expression with resultType string). - :type prefix: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype prefix: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. 
Type: string (or Expression with resultType string). - :type file_list_path: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype file_list_path: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any + :vartype modified_datetime_end: any """ _validation = { @@ -1715,6 +2311,47 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. 
Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: Amazon S3 Compatible wildcardFolderPath. Type: string (or + Expression with resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: Amazon S3 Compatible wildcardFileName. Type: string (or Expression + with resultType string). + :paramtype wildcard_file_name: any + :keyword prefix: The prefix filter for the S3 Compatible object name. Type: string (or + Expression with resultType string). + :paramtype prefix: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). 
+ :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + """ super(AmazonS3CompatibleReadSettings, self).__init__(**kwargs) self.type = 'AmazonS3CompatibleReadSettings' # type: str self.recursive = kwargs.get('recursive', None) @@ -1734,50 +2371,50 @@ class AmazonS3Dataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param bucket_name: Required. The name of the Amazon S3 bucket. Type: string (or Expression - with resultType string). - :type bucket_name: any - :param key: The key of the Amazon S3 object. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar bucket_name: Required. The name of the Amazon S3 bucket. Type: string (or Expression with + resultType string). + :vartype bucket_name: any + :ivar key: The key of the Amazon S3 object. Type: string (or Expression with resultType string). - :type key: any - :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with + :vartype key: any + :ivar prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). - :type prefix: any - :param version: The version for the S3 object. Type: string (or Expression with resultType + :vartype prefix: any + :ivar version: The version for the S3 object. Type: string (or Expression with resultType string). 
- :type version: any - :param modified_datetime_start: The start of S3 object's modified datetime. Type: string (or + :vartype version: any + :ivar modified_datetime_start: The start of S3 object's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of S3 object's modified datetime. Type: string (or + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of S3 object's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any - :param format: The format of files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the Amazon S3 object. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + :vartype modified_datetime_end: any + :ivar format: The format of files. + :vartype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :ivar compression: The data compression method used for the Amazon S3 object. + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -1810,6 +2447,50 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword bucket_name: Required. The name of the Amazon S3 bucket. Type: string (or Expression + with resultType string). + :paramtype bucket_name: any + :keyword key: The key of the Amazon S3 object. Type: string (or Expression with resultType + string). + :paramtype key: any + :keyword prefix: The prefix filter for the S3 object name. Type: string (or Expression with + resultType string). + :paramtype prefix: any + :keyword version: The version for the S3 object. Type: string (or Expression with resultType + string). + :paramtype version: any + :keyword modified_datetime_start: The start of S3 object's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of S3 object's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + :keyword format: The format of files. + :paramtype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :keyword compression: The data compression method used for the Amazon S3 object. + :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ super(AmazonS3Dataset, self).__init__(**kwargs) self.type = 'AmazonS3Object' # type: str self.bucket_name = kwargs['bucket_name'] @@ -1827,38 +2508,38 @@ class AmazonS3LinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param authentication_type: The authentication type of S3. Allowed value: AccessKey (default) - or TemporarySecurityCredentials. Type: string (or Expression with resultType string). - :type authentication_type: any - :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar authentication_type: The authentication type of S3. Allowed value: AccessKey (default) or + TemporarySecurityCredentials. Type: string (or Expression with resultType string). 
+ :vartype authentication_type: any + :ivar access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: any - :param secret_access_key: The secret access key of the Amazon S3 Identity and Access Management + :vartype access_key_id: any + :ivar secret_access_key: The secret access key of the Amazon S3 Identity and Access Management (IAM) user. - :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_url: This value specifies the endpoint to access with the S3 Connector. This is + :vartype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_url: This value specifies the endpoint to access with the S3 Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :type service_url: any - :param session_token: The session token for the S3 temporary security credential. - :type session_token: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_url: any + :ivar session_token: The session token for the S3 temporary security credential. + :vartype session_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -1884,6 +2565,38 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword authentication_type: The authentication type of S3. Allowed value: AccessKey (default) + or TemporarySecurityCredentials. Type: string (or Expression with resultType string). + :paramtype authentication_type: any + :keyword access_key_id: The access key identifier of the Amazon S3 Identity and Access + Management (IAM) user. Type: string (or Expression with resultType string). + :paramtype access_key_id: any + :keyword secret_access_key: The secret access key of the Amazon S3 Identity and Access + Management (IAM) user. + :paramtype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_url: This value specifies the endpoint to access with the S3 Connector. This + is an optional property; change it only if you want to try a different service endpoint or want + to switch between https and http. Type: string (or Expression with resultType string). + :paramtype service_url: any + :keyword session_token: The session token for the S3 temporary security credential. + :paramtype session_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(AmazonS3LinkedService, self).__init__(**kwargs) self.type = 'AmazonS3' # type: str self.authentication_type = kwargs.get('authentication_type', None) @@ -1899,23 +2612,23 @@ class AmazonS3Location(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any - :param bucket_name: Specify the bucketName of amazon S3. Type: string (or Expression with + :vartype file_name: any + :ivar bucket_name: Specify the bucketName of amazon S3. Type: string (or Expression with resultType string). - :type bucket_name: any - :param version: Specify the version of amazon S3. Type: string (or Expression with resultType + :vartype bucket_name: any + :ivar version: Specify the version of amazon S3. Type: string (or Expression with resultType string). 
- :type version: any + :vartype version: any """ _validation = { @@ -1935,6 +2648,23 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + :keyword bucket_name: Specify the bucketName of amazon S3. Type: string (or Expression with + resultType string). + :paramtype bucket_name: any + :keyword version: Specify the version of amazon S3. Type: string (or Expression with resultType + string). + :paramtype version: any + """ super(AmazonS3Location, self).__init__(**kwargs) self.type = 'AmazonS3Location' # type: str self.bucket_name = kwargs.get('bucket_name', None) @@ -1946,47 +2676,47 @@ class AmazonS3ReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or Expression with + :vartype recursive: any + :ivar wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or Expression with + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with + :vartype wildcard_file_name: any + :ivar prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). - :type prefix: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype prefix: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. 
- :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype file_list_path: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any + :vartype modified_datetime_end: any """ _validation = { @@ -2014,6 +2744,47 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or Expression with + resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or Expression with + resultType string). + :paramtype wildcard_file_name: any + :keyword prefix: The prefix filter for the S3 object name. Type: string (or Expression with + resultType string). + :paramtype prefix: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). 
+ :paramtype modified_datetime_end: any + """ super(AmazonS3ReadSettings, self).__init__(**kwargs) self.type = 'AmazonS3ReadSettings' # type: str self.recursive = kwargs.get('recursive', None) @@ -2036,19 +2807,19 @@ class ControlActivity(Activity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] """ _validation = { @@ -2073,6 +2844,19 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. 
+ :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + """ super(ControlActivity, self).__init__(**kwargs) self.type = 'Container' # type: str @@ -2082,23 +2866,23 @@ class AppendVariableActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param variable_name: Name of the variable whose value needs to be appended to. - :type variable_name: str - :param value: Value to be appended. Could be a static value or Expression. - :type value: any + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. 
+ :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar variable_name: Name of the variable whose value needs to be appended to. + :vartype variable_name: str + :ivar value: Value to be appended. Could be a static value or Expression. + :vartype value: any """ _validation = { @@ -2121,6 +2905,23 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword variable_name: Name of the variable whose value needs to be appended to. + :paramtype variable_name: str + :keyword value: Value to be appended. Could be a static value or Expression. + :paramtype value: any + """ super(AppendVariableActivity, self).__init__(**kwargs) self.type = 'AppendVariable' # type: str self.variable_name = kwargs.get('variable_name', None) @@ -2148,6 +2949,8 @@ def __init__( self, **kwargs ): + """ + """ super(ArmIdWrapper, self).__init__(**kwargs) self.id = None @@ -2157,35 +2960,35 @@ class AvroDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the avro storage. 
- :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param avro_compression_codec: The data avroCompressionCodec. Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar location: The location of the avro storage. + :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation + :ivar avro_compression_codec: The data avroCompressionCodec. Type: string (or Expression with resultType string). - :type avro_compression_codec: any - :param avro_compression_level: - :type avro_compression_level: int + :vartype avro_compression_codec: any + :ivar avro_compression_level: + :vartype avro_compression_level: int """ _validation = { @@ -2213,6 +3016,35 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword location: The location of the avro storage. + :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation + :keyword avro_compression_codec: The data avroCompressionCodec. Type: string (or Expression + with resultType string). + :paramtype avro_compression_codec: any + :keyword avro_compression_level: + :paramtype avro_compression_level: int + """ super(AvroDataset, self).__init__(**kwargs) self.type = 'Avro' # type: str self.location = kwargs.get('location', None) @@ -2228,15 +3060,15 @@ class DatasetStorageFormat(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage format.Constant filled by server. - :type type: str - :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: any - :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: any + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage format.Constant filled by server. + :vartype type: str + :ivar serializer: Serializer. Type: string (or Expression with resultType string). + :vartype serializer: any + :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). + :vartype deserializer: any """ _validation = { @@ -2258,6 +3090,15 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword serializer: Serializer. Type: string (or Expression with resultType string). 
+ :paramtype serializer: any + :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). + :paramtype deserializer: any + """ super(DatasetStorageFormat, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = 'DatasetStorageFormat' # type: str @@ -2270,15 +3111,15 @@ class AvroFormat(DatasetStorageFormat): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage format.Constant filled by server. - :type type: str - :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: any - :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: any + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage format.Constant filled by server. + :vartype type: str + :ivar serializer: Serializer. Type: string (or Expression with resultType string). + :vartype serializer: any + :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). + :vartype deserializer: any """ _validation = { @@ -2296,6 +3137,15 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword serializer: Serializer. Type: string (or Expression with resultType string). + :paramtype serializer: any + :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). 
+ :paramtype deserializer: any + """ super(AvroFormat, self).__init__(**kwargs) self.type = 'AvroFormat' # type: str @@ -2308,29 +3158,29 @@ class CopySink(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any + :vartype disable_metrics_collection: any """ _validation = { @@ -2356,6 +3206,29 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + """ super(CopySink, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = 'CopySink' # type: str @@ -2372,33 +3245,33 @@ class AvroSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Avro store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: Avro format settings. - :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings + :vartype disable_metrics_collection: any + :ivar store_settings: Avro store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :ivar format_settings: Avro format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings """ _validation = { @@ -2422,6 +3295,33 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: Avro store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :keyword format_settings: Avro format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings + """ super(AvroSink, self).__init__(**kwargs) self.type = 'AvroSink' # type: str self.store_settings = kwargs.get('store_settings', None) @@ -2433,28 +3333,28 @@ class AvroSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Avro store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar store_settings: Avro store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -2476,6 +3376,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: Avro store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(AvroSource, self).__init__(**kwargs) self.type = 'AvroSource' # type: str self.store_settings = kwargs.get('store_settings', None) @@ -2490,11 +3412,11 @@ class FormatWriteSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str """ _validation = { @@ -2514,6 +3436,11 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + """ super(FormatWriteSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = 'FormatWriteSettings' # type: str @@ -2524,22 +3451,22 @@ class AvroWriteSettings(FormatWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param record_name: Top level record name in write result, which is required in AVRO spec. - :type record_name: str - :param record_namespace: Record namespace in the write result. - :type record_namespace: str - :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar record_name: Top level record name in write result, which is required in AVRO spec. + :vartype record_name: str + :ivar record_namespace: Record namespace in the write result. + :vartype record_namespace: str + :ivar max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :type max_rows_per_file: any - :param file_name_prefix: Specifies the file name pattern + :vartype max_rows_per_file: any + :ivar file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). 
- :type file_name_prefix: any + :vartype file_name_prefix: any """ _validation = { @@ -2559,6 +3486,22 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword record_name: Top level record name in write result, which is required in AVRO spec. + :paramtype record_name: str + :keyword record_namespace: Record namespace in the write result. + :paramtype record_namespace: str + :keyword max_rows_per_file: Limit the written file's row count to be smaller than or equal to + the specified count. Type: integer (or Expression with resultType integer). + :paramtype max_rows_per_file: any + :keyword file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :paramtype file_name_prefix: any + """ super(AvroWriteSettings, self).__init__(**kwargs) self.type = 'AvroWriteSettings' # type: str self.record_name = kwargs.get('record_name', None) @@ -2575,8 +3518,8 @@ class CustomSetupBase(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. The type of custom setup.Constant filled by server. - :type type: str + :ivar type: Required. The type of custom setup.Constant filled by server. + :vartype type: str """ _validation = { @@ -2595,6 +3538,8 @@ def __init__( self, **kwargs ): + """ + """ super(CustomSetupBase, self).__init__(**kwargs) self.type = None # type: Optional[str] @@ -2604,10 +3549,10 @@ class AzPowerShellSetup(CustomSetupBase): All required parameters must be populated in order to send to Azure. - :param type: Required. The type of custom setup.Constant filled by server. - :type type: str - :param version: Required. The required version of Azure PowerShell to install. - :type version: str + :ivar type: Required. 
The type of custom setup.Constant filled by server. + :vartype type: str + :ivar version: Required. The required version of Azure PowerShell to install. + :vartype version: str """ _validation = { @@ -2624,6 +3569,10 @@ def __init__( self, **kwargs ): + """ + :keyword version: Required. The required version of Azure PowerShell to install. + :paramtype version: str + """ super(AzPowerShellSetup, self).__init__(**kwargs) self.type = 'AzPowerShellSetup' # type: str self.version = kwargs['version'] @@ -2634,38 +3583,38 @@ class AzureBatchLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param account_name: Required. The Azure Batch account name. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar account_name: Required. The Azure Batch account name. Type: string (or Expression with resultType string). - :type account_name: any - :param access_key: The Azure Batch account access key. - :type access_key: ~azure.mgmt.datafactory.models.SecretBase - :param batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType + :vartype account_name: any + :ivar access_key: The Azure Batch account access key. + :vartype access_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType string). - :type batch_uri: any - :param pool_name: Required. The Azure Batch pool name. Type: string (or Expression with + :vartype batch_uri: any + :ivar pool_name: Required. The Azure Batch pool name. Type: string (or Expression with resultType string). - :type pool_name: any - :param linked_service_name: Required. The Azure Storage linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype pool_name: any + :ivar linked_service_name: Required. The Azure Storage linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param credential: The credential reference containing authentication information. 
- :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype encrypted_credential: any + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -2696,6 +3645,38 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword account_name: Required. The Azure Batch account name. Type: string (or Expression with + resultType string). + :paramtype account_name: any + :keyword access_key: The Azure Batch account access key. + :paramtype access_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType + string). + :paramtype batch_uri: any + :keyword pool_name: Required. The Azure Batch pool name. Type: string (or Expression with + resultType string). + :paramtype pool_name: any + :keyword linked_service_name: Required. The Azure Storage linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureBatchLinkedService, self).__init__(**kwargs) self.type = 'AzureBatch' # type: str self.account_name = kwargs['account_name'] @@ -2712,47 +3693,47 @@ class AzureBlobDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param folder_path: The path of the Azure Blob storage. Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar folder_path: The path of the Azure Blob storage. Type: string (or Expression with resultType string). - :type folder_path: any - :param table_root_location: The root of blob path. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar table_root_location: The root of blob path. Type: string (or Expression with resultType string). - :type table_root_location: any - :param file_name: The name of the Azure Blob. Type: string (or Expression with resultType + :vartype table_root_location: any + :ivar file_name: The name of the Azure Blob. Type: string (or Expression with resultType string). - :type file_name: any - :param modified_datetime_start: The start of Azure Blob's modified datetime. Type: string (or + :vartype file_name: any + :ivar modified_datetime_start: The start of Azure Blob's modified datetime. Type: string (or Expression with resultType string). 
- :type modified_datetime_start: any - :param modified_datetime_end: The end of Azure Blob's modified datetime. Type: string (or + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of Azure Blob's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any - :param format: The format of the Azure Blob storage. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the blob storage. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + :vartype modified_datetime_end: any + :ivar format: The format of the Azure Blob storage. + :vartype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :ivar compression: The data compression method used for the blob storage. + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -2783,6 +3764,47 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. 
+ :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword folder_path: The path of the Azure Blob storage. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword table_root_location: The root of blob path. Type: string (or Expression with + resultType string). + :paramtype table_root_location: any + :keyword file_name: The name of the Azure Blob. Type: string (or Expression with resultType + string). + :paramtype file_name: any + :keyword modified_datetime_start: The start of Azure Blob's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of Azure Blob's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + :keyword format: The format of the Azure Blob storage. + :paramtype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :keyword compression: The data compression method used for the blob storage. + :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ super(AzureBlobDataset, self).__init__(**kwargs) self.type = 'AzureBlob' # type: str self.folder_path = kwargs.get('folder_path', None) @@ -2799,38 +3821,38 @@ class AzureBlobFSDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. 
Type: string (or + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar folder_path: The path of the Azure Data Lake Storage Gen2 storage. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: The name of the Azure Data Lake Storage Gen2. Type: string (or Expression - with resultType string). - :type file_name: any - :param format: The format of the Azure Data Lake Storage Gen2 storage. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the blob storage. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + :vartype folder_path: any + :ivar file_name: The name of the Azure Data Lake Storage Gen2. Type: string (or Expression with + resultType string). + :vartype file_name: any + :ivar format: The format of the Azure Data Lake Storage Gen2 storage. + :vartype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :ivar compression: The data compression method used for the blob storage. + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -2858,6 +3880,38 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword folder_path: The path of the Azure Data Lake Storage Gen2 storage. Type: string (or + Expression with resultType string). + :paramtype folder_path: any + :keyword file_name: The name of the Azure Data Lake Storage Gen2. Type: string (or Expression + with resultType string). + :paramtype file_name: any + :keyword format: The format of the Azure Data Lake Storage Gen2 storage. + :paramtype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :keyword compression: The data compression method used for the blob storage. + :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ super(AzureBlobFSDataset, self).__init__(**kwargs) self.type = 'AzureBlobFSFile' # type: str self.folder_path = kwargs.get('folder_path', None) @@ -2871,44 +3925,44 @@ class AzureBlobFSLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). - :type url: any - :param account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or + :vartype url: any + :ivar account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). - :type account_key: any - :param service_principal_id: The ID of the application used to authenticate against the Azure + :vartype account_key: any + :ivar service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). 
- :type service_principal_id: any - :param service_principal_key: The Key of the application used to authenticate against the Azure + :vartype service_principal_id: any + :ivar service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Storage Gen2 account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype azure_cloud_type: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype encrypted_credential: any + :ivar credential: The credential reference containing authentication information. 
+ :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -2937,6 +3991,44 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or + Expression with resultType string). + :paramtype url: any + :keyword account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string + (or Expression with resultType string). + :paramtype account_key: any + :keyword service_principal_id: The ID of the application used to authenticate against the Azure + Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The Key of the application used to authenticate against the + Azure Data Lake Storage Gen2 account. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. + Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is + the data factory regions’ cloud type. 
Type: string (or Expression with resultType string). + :paramtype azure_cloud_type: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureBlobFSLinkedService, self).__init__(**kwargs) self.type = 'AzureBlobFS' # type: str self.url = kwargs['url'] @@ -2954,20 +4046,20 @@ class AzureBlobFSLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any - :param file_system: Specify the fileSystem of azure blobFS. Type: string (or Expression with + :vartype file_name: any + :ivar file_system: Specify the fileSystem of azure blobFS. Type: string (or Expression with resultType string). 
- :type file_system: any + :vartype file_system: any """ _validation = { @@ -2986,6 +4078,20 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + :keyword file_system: Specify the fileSystem of azure blobFS. Type: string (or Expression with + resultType string). + :paramtype file_system: any + """ super(AzureBlobFSLocation, self).__init__(**kwargs) self.type = 'AzureBlobFSLocation' # type: str self.file_system = kwargs.get('file_system', None) @@ -2996,44 +4102,44 @@ class AzureBlobFSReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string (or Expression with + :vartype recursive: any + :ivar wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype wildcard_file_name: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype file_list_path: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
- :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any + :vartype modified_datetime_end: any """ _validation = { @@ -3060,6 +4166,44 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string (or Expression + with resultType string). 
+ :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with + resultType string). + :paramtype wildcard_file_name: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + """ super(AzureBlobFSReadSettings, self).__init__(**kwargs) self.type = 'AzureBlobFSReadSettings' # type: str self.recursive = kwargs.get('recursive', None) @@ -3078,34 +4222,34 @@ class AzureBlobFSSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. 
Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. 
- :type copy_behavior: any - :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any + :ivar metadata: Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). - :type metadata: list[~azure.mgmt.datafactory.models.MetadataItem] + :vartype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ _validation = { @@ -3129,6 +4273,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. 
+ :paramtype copy_behavior: any + :keyword metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :paramtype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] + """ super(AzureBlobFSSink, self).__init__(**kwargs) self.type = 'AzureBlobFSSink' # type: str self.copy_behavior = kwargs.get('copy_behavior', None) @@ -3140,32 +4312,32 @@ class AzureBlobFSSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType + :vartype disable_metrics_collection: any + :ivar treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). - :type treat_empty_as_null: any - :param skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or + :vartype treat_empty_as_null: any + :ivar skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). - :type skip_header_line_count: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype skip_header_line_count: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any + :vartype recursive: any """ _validation = { @@ -3188,6 +4360,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType + boolean). + :paramtype treat_empty_as_null: any + :keyword skip_header_line_count: Number of header lines to skip from each blob. Type: integer + (or Expression with resultType integer). + :paramtype skip_header_line_count: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + """ super(AzureBlobFSSource, self).__init__(**kwargs) self.type = 'AzureBlobFSSource' # type: str self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) @@ -3203,19 +4401,19 @@ class StoreWriteSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any """ _validation = { @@ -3238,6 +4436,19 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. + :paramtype copy_behavior: any + """ super(StoreWriteSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = 'StoreWriteSettings' # type: str @@ -3251,22 +4462,22 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. 
- :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any - :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any + :ivar block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). - :type block_size_in_mb: any + :vartype block_size_in_mb: any """ _validation = { @@ -3286,6 +4497,22 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. + :paramtype copy_behavior: any + :keyword block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: + integer (or Expression with resultType integer). + :paramtype block_size_in_mb: any + """ super(AzureBlobFSWriteSettings, self).__init__(**kwargs) self.type = 'AzureBlobFSWriteSettings' # type: str self.block_size_in_mb = kwargs.get('block_size_in_mb', None) @@ -3296,56 +4523,56 @@ class AzureBlobStorageLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: The connection string. It is mutually exclusive with sasUri, + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: The connection string. It is mutually exclusive with sasUri, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with + :vartype connection_string: any + :ivar account_key: The Azure key vault secret reference of accountKey in connection string. + :vartype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with connectionString, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: any - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is + :vartype sas_uri: any + :ivar sas_token: The Azure key vault secret reference of sasToken in sas uri. + :vartype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is mutually exclusive with connectionString, sasUri property. - :type service_endpoint: str - :param service_principal_id: The ID of the service principal used to authenticate against Azure + :vartype service_endpoint: str + :ivar service_principal_id: The ID of the service principal used to authenticate against Azure SQL Data Warehouse. 
Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The key of the service principal used to authenticate against + :vartype service_principal_id: any + :ivar service_principal_key: The key of the service principal used to authenticate against Azure SQL Data Warehouse. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: any - :param account_kind: Specify the kind of your storage account. Allowed values are: Storage + :vartype azure_cloud_type: any + :ivar account_kind: Specify the kind of your storage account. Allowed values are: Storage (general purpose v1), StorageV2 (general purpose v2), BlobStorage, or BlockBlobStorage. Type: string (or Expression with resultType string). - :type account_kind: str - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype account_kind: str + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: str - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype encrypted_credential: str + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -3377,6 +4604,56 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: The connection string. It is mutually exclusive with sasUri, + serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword account_key: The Azure key vault secret reference of accountKey in connection string. + :paramtype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with + connectionString, serviceEndpoint property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype sas_uri: any + :keyword sas_token: The Azure key vault secret reference of sasToken in sas uri. 
+ :paramtype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is + mutually exclusive with connectionString, sasUri property. + :paramtype service_endpoint: str + :keyword service_principal_id: The ID of the service principal used to authenticate against + Azure SQL Data Warehouse. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The key of the service principal used to authenticate against + Azure SQL Data Warehouse. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. + Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is + the data factory regions’ cloud type. Type: string (or Expression with resultType string). + :paramtype azure_cloud_type: any + :keyword account_kind: Specify the kind of your storage account. Allowed values are: Storage + (general purpose v1), StorageV2 (general purpose v2), BlobStorage, or BlockBlobStorage. Type: + string (or Expression with resultType string). + :paramtype account_kind: str + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: str + :keyword credential: The credential reference containing authentication information. 
+ :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureBlobStorageLinkedService, self).__init__(**kwargs) self.type = 'AzureBlobStorage' # type: str self.connection_string = kwargs.get('connection_string', None) @@ -3398,20 +4675,20 @@ class AzureBlobStorageLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any - :param container: Specify the container of azure blob. Type: string (or Expression with + :vartype file_name: any + :ivar container: Specify the container of azure blob. Type: string (or Expression with resultType string). - :type container: any + :vartype container: any """ _validation = { @@ -3430,6 +4707,20 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. 
Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + :keyword container: Specify the container of azure blob. Type: string (or Expression with + resultType string). + :paramtype container: any + """ super(AzureBlobStorageLocation, self).__init__(**kwargs) self.type = 'AzureBlobStorageLocation' # type: str self.container = kwargs.get('container', None) @@ -3440,47 +4731,47 @@ class AzureBlobStorageReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. 
Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string (or Expression with + :vartype recursive: any + :ivar wildcard_folder_path: Azure blob wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or Expression with + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: Azure blob wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with + :vartype wildcard_file_name: any + :ivar prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). - :type prefix: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype prefix: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype file_list_path: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
- :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any + :vartype modified_datetime_end: any """ _validation = { @@ -3508,6 +4799,47 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: Azure blob wildcardFolderPath. Type: string (or Expression with + resultType string). 
+ :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: Azure blob wildcardFileName. Type: string (or Expression with + resultType string). + :paramtype wildcard_file_name: any + :keyword prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with + resultType string). + :paramtype prefix: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + """ super(AzureBlobStorageReadSettings, self).__init__(**kwargs) self.type = 'AzureBlobStorageReadSettings' # type: str self.recursive = kwargs.get('recursive', None) @@ -3527,22 +4859,22 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any - :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any + :ivar block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). - :type block_size_in_mb: any + :vartype block_size_in_mb: any """ _validation = { @@ -3562,6 +4894,22 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. + :paramtype copy_behavior: any + :keyword block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: + integer (or Expression with resultType integer). + :paramtype block_size_in_mb: any + """ super(AzureBlobStorageWriteSettings, self).__init__(**kwargs) self.type = 'AzureBlobStorageWriteSettings' # type: str self.block_size_in_mb = kwargs.get('block_size_in_mb', None) @@ -3572,33 +4920,33 @@ class AzureDatabricksDeltaLakeDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table: The name of delta table. Type: string (or Expression with resultType string). - :type table: any - :param database: The database name of delta table. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table: The name of delta table. Type: string (or Expression with resultType string). + :vartype table: any + :ivar database: The database name of delta table. Type: string (or Expression with resultType string). - :type database: any + :vartype database: any """ _validation = { @@ -3624,6 +4972,33 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. 
Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table: The name of delta table. Type: string (or Expression with resultType string). + :paramtype table: any + :keyword database: The database name of delta table. Type: string (or Expression with + resultType string). + :paramtype database: any + """ super(AzureDatabricksDeltaLakeDataset, self).__init__(**kwargs) self.type = 'AzureDatabricksDeltaLakeDataset' # type: str self.table = kwargs.get('table', None) @@ -3638,11 +5013,11 @@ class ExportSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The export setting type.Constant filled by server. - :type type: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. The export setting type.Constant filled by server. 
+ :vartype type: str """ _validation = { @@ -3662,6 +5037,11 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(ExportSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = 'ExportSettings' # type: str @@ -3672,17 +5052,17 @@ class AzureDatabricksDeltaLakeExportCommand(ExportSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The export setting type.Constant filled by server. - :type type: str - :param date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. + :vartype additional_properties: dict[str, any] + :ivar type: Required. The export setting type.Constant filled by server. + :vartype type: str + :ivar date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :type date_format: any - :param timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta - Lake Copy. Type: string (or Expression with resultType string). - :type timestamp_format: any + :vartype date_format: any + :ivar timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta Lake + Copy. Type: string (or Expression with resultType string). + :vartype timestamp_format: any """ _validation = { @@ -3700,6 +5080,17 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. + Type: string (or Expression with resultType string). + :paramtype date_format: any + :keyword timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta + Lake Copy. Type: string (or Expression with resultType string). + :paramtype timestamp_format: any + """ super(AzureDatabricksDeltaLakeExportCommand, self).__init__(**kwargs) self.type = 'AzureDatabricksDeltaLakeExportCommand' # type: str self.date_format = kwargs.get('date_format', None) @@ -3714,11 +5105,11 @@ class ImportSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The import setting type.Constant filled by server. - :type type: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. The import setting type.Constant filled by server. + :vartype type: str """ _validation = { @@ -3738,6 +5129,11 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(ImportSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = 'ImportSettings' # type: str @@ -3748,17 +5144,17 @@ class AzureDatabricksDeltaLakeImportCommand(ImportSettings): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The import setting type.Constant filled by server. - :type type: str - :param date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: + :vartype additional_properties: dict[str, any] + :ivar type: Required. The import setting type.Constant filled by server. + :vartype type: str + :ivar date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :type date_format: any - :param timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake + :vartype date_format: any + :ivar timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :type timestamp_format: any + :vartype timestamp_format: any """ _validation = { @@ -3776,6 +5172,17 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. + Type: string (or Expression with resultType string). + :paramtype date_format: any + :keyword timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake + Copy. Type: string (or Expression with resultType string). 
+ :paramtype timestamp_format: any + """ super(AzureDatabricksDeltaLakeImportCommand, self).__init__(**kwargs) self.type = 'AzureDatabricksDeltaLakeImportCommand' # type: str self.date_format = kwargs.get('date_format', None) @@ -3787,33 +5194,33 @@ class AzureDatabricksDeltaLakeLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks deployment. 
Type: string (or Expression with resultType string). - :type domain: any - :param access_token: Access token for databricks REST API. Refer to + :vartype domain: any + :ivar access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or AzureKeyVaultSecretReference. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param cluster_id: The id of an existing interactive cluster that will be used for all runs of + :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar cluster_id: The id of an existing interactive cluster that will be used for all runs of this job. Type: string (or Expression with resultType string). - :type cluster_id: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype cluster_id: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -3838,6 +5245,33 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword domain: Required. 
:code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :paramtype domain: any + :keyword access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword cluster_id: The id of an existing interactive cluster that will be used for all runs + of this job. Type: string (or Expression with resultType string). + :paramtype cluster_id: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AzureDatabricksDeltaLakeLinkedService, self).__init__(**kwargs) self.type = 'AzureDatabricksDeltaLake' # type: str self.domain = kwargs['domain'] @@ -3851,34 +5285,34 @@ class AzureDatabricksDeltaLakeSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + :vartype disable_metrics_collection: any + :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: any - :param import_settings: Azure Databricks Delta Lake import settings. 
- :type import_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeImportCommand + :vartype pre_copy_script: any + :ivar import_settings: Azure Databricks Delta Lake import settings. + :vartype import_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeImportCommand """ _validation = { @@ -3902,6 +5336,35 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :paramtype pre_copy_script: any + :keyword import_settings: Azure Databricks Delta Lake import settings. 
+ :paramtype import_settings: + ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeImportCommand + """ super(AzureDatabricksDeltaLakeSink, self).__init__(**kwargs) self.type = 'AzureDatabricksDeltaLakeSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) @@ -3913,28 +5376,28 @@ class AzureDatabricksDeltaLakeSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. 
Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with - resultType string). - :type query: any - :param export_settings: Azure Databricks Delta Lake export settings. - :type export_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeExportCommand + :vartype disable_metrics_collection: any + :ivar query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with resultType + string). + :vartype query: any + :ivar export_settings: Azure Databricks Delta Lake export settings. + :vartype export_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeExportCommand """ _validation = { @@ -3956,6 +5419,29 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with + resultType string). + :paramtype query: any + :keyword export_settings: Azure Databricks Delta Lake export settings. 
+ :paramtype export_settings: + ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeExportCommand + """ super(AzureDatabricksDeltaLakeSource, self).__init__(**kwargs) self.type = 'AzureDatabricksDeltaLakeSource' # type: str self.query = kwargs.get('query', None) @@ -3967,86 +5453,86 @@ class AzureDatabricksLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar domain: Required. 
:code:``.azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). - :type domain: any - :param access_token: Access token for databricks REST API. Refer to + :vartype domain: any + :ivar access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param authentication: Required to specify MSI, if using Workspace resource id for databricks + :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar authentication: Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). - :type authentication: any - :param workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or + :vartype authentication: any + :ivar workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). - :type workspace_resource_id: any - :param existing_cluster_id: The id of an existing interactive cluster that will be used for all + :vartype workspace_resource_id: any + :ivar existing_cluster_id: The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). - :type existing_cluster_id: any - :param instance_pool_id: The id of an existing instance pool that will be used for all runs of + :vartype existing_cluster_id: any + :ivar instance_pool_id: The id of an existing instance pool that will be used for all runs of this activity. Type: string (or Expression with resultType string). 
- :type instance_pool_id: any - :param new_cluster_version: If not using an existing interactive cluster, this specifies the + :vartype instance_pool_id: any + :ivar new_cluster_version: If not using an existing interactive cluster, this specifies the Spark version of a new job cluster or instance pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string). - :type new_cluster_version: any - :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies + :vartype new_cluster_version: any + :ivar new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job cluster or instance pool. For new job clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: string (or Expression with resultType string). - :type new_cluster_num_of_worker: any - :param new_cluster_node_type: The node type of the new job cluster. This property is required - if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is + :vartype new_cluster_num_of_worker: any + :ivar new_cluster_node_type: The node type of the new job cluster. This property is required if + newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or Expression with resultType string). - :type new_cluster_node_type: any - :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value + :vartype new_cluster_node_type: any + :ivar new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value pairs. 
- :type new_cluster_spark_conf: dict[str, any] - :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment - variables key-value pairs. - :type new_cluster_spark_env_vars: dict[str, any] - :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored + :vartype new_cluster_spark_conf: dict[str, any] + :ivar new_cluster_spark_env_vars: A set of optional, user-specified Spark environment variables + key-value pairs. + :vartype new_cluster_spark_env_vars: dict[str, any] + :ivar new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored in instance pool configurations. - :type new_cluster_custom_tags: dict[str, any] - :param new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and + :vartype new_cluster_custom_tags: dict[str, any] + :ivar new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and event logs. Type: string (or Expression with resultType string). - :type new_cluster_log_destination: any - :param new_cluster_driver_node_type: The driver node type for the new job cluster. This - property is ignored in instance pool configurations. Type: string (or Expression with - resultType string). - :type new_cluster_driver_node_type: any - :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: + :vartype new_cluster_log_destination: any + :ivar new_cluster_driver_node_type: The driver node type for the new job cluster. This property + is ignored in instance pool configurations. Type: string (or Expression with resultType + string). + :vartype new_cluster_driver_node_type: any + :ivar new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType array of strings). - :type new_cluster_init_scripts: any - :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. 
This + :vartype new_cluster_init_scripts: any + :ivar new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean). - :type new_cluster_enable_elastic_disk: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype new_cluster_enable_elastic_disk: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param policy_id: The policy id for limiting the ability to configure clusters based on a user + :vartype encrypted_credential: any + :ivar policy_id: The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string). - :type policy_id: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype policy_id: any + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -4086,6 +5572,86 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :paramtype domain: any + :keyword access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression + with resultType string). + :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword authentication: Required to specify MSI, if using Workspace resource id for databricks + REST API. Type: string (or Expression with resultType string). + :paramtype authentication: any + :keyword workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or + Expression with resultType string). + :paramtype workspace_resource_id: any + :keyword existing_cluster_id: The id of an existing interactive cluster that will be used for + all runs of this activity. Type: string (or Expression with resultType string). + :paramtype existing_cluster_id: any + :keyword instance_pool_id: The id of an existing instance pool that will be used for all runs + of this activity. Type: string (or Expression with resultType string). + :paramtype instance_pool_id: any + :keyword new_cluster_version: If not using an existing interactive cluster, this specifies the + Spark version of a new job cluster or instance pool nodes created for each run of this + activity. Required if instancePoolId is specified. Type: string (or Expression with resultType + string). + :paramtype new_cluster_version: any + :keyword new_cluster_num_of_worker: If not using an existing interactive cluster, this + specifies the number of worker nodes to use for the new job cluster or instance pool. 
For new + job clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means + auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and + can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is + specified. Type: string (or Expression with resultType string). + :paramtype new_cluster_num_of_worker: any + :keyword new_cluster_node_type: The node type of the new job cluster. This property is required + if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is + specified, this property is ignored. Type: string (or Expression with resultType string). + :paramtype new_cluster_node_type: any + :keyword new_cluster_spark_conf: A set of optional, user-specified Spark configuration + key-value pairs. + :paramtype new_cluster_spark_conf: dict[str, any] + :keyword new_cluster_spark_env_vars: A set of optional, user-specified Spark environment + variables key-value pairs. + :paramtype new_cluster_spark_env_vars: dict[str, any] + :keyword new_cluster_custom_tags: Additional tags for cluster resources. This property is + ignored in instance pool configurations. + :paramtype new_cluster_custom_tags: dict[str, any] + :keyword new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and + event logs. Type: string (or Expression with resultType string). + :paramtype new_cluster_log_destination: any + :keyword new_cluster_driver_node_type: The driver node type for the new job cluster. This + property is ignored in instance pool configurations. Type: string (or Expression with + resultType string). + :paramtype new_cluster_driver_node_type: any + :keyword new_cluster_init_scripts: User-defined initialization scripts for the new cluster. + Type: array of strings (or Expression with resultType array of strings). 
+ :paramtype new_cluster_init_scripts: any + :keyword new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This + property is now ignored, and takes the default elastic disk behavior in Databricks (elastic + disks are always enabled). Type: boolean (or Expression with resultType boolean). + :paramtype new_cluster_enable_elastic_disk: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword policy_id: The policy id for limiting the ability to configure clusters based on a + user defined set of rules. Type: string (or Expression with resultType string). + :paramtype policy_id: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureDatabricksLinkedService, self).__init__(**kwargs) self.type = 'AzureDatabricks' # type: str self.domain = kwargs['domain'] @@ -4117,23 +5683,23 @@ class ExecutionActivity(Activity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. 
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy """ _validation = { @@ -4160,6 +5726,23 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. 
+ :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + """ super(ExecutionActivity, self).__init__(**kwargs) self.type = 'Execution' # type: str self.linked_service_name = kwargs.get('linked_service_name', None) @@ -4171,29 +5754,29 @@ class AzureDataExplorerCommandActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param command: Required. A control command, according to the Azure Data Explorer command + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. 
+ :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar command: Required. A control command, according to the Azure Data Explorer command syntax. Type: string (or Expression with resultType string). - :type command: any - :param command_timeout: Control command timeout. Type: string (or Expression with resultType + :vartype command: any + :ivar command_timeout: Control command timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). - :type command_timeout: any + :vartype command_timeout: any """ _validation = { @@ -4219,6 +5802,29 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword command: Required. A control command, according to the Azure Data Explorer command + syntax. Type: string (or Expression with resultType string). + :paramtype command: any + :keyword command_timeout: Control command timeout. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). + :paramtype command_timeout: any + """ super(AzureDataExplorerCommandActivity, self).__init__(**kwargs) self.type = 'AzureDataExplorerCommand' # type: str self.command = kwargs['command'] @@ -4230,37 +5836,37 @@ class AzureDataExplorerLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL - will be in the format https://:code:``.:code:``.kusto.windows.net. - Type: string (or Expression with resultType string). - :type endpoint: any - :param service_principal_id: The ID of the service principal used to authenticate against Azure + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. 
+ :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL will + be in the format https://:code:``.:code:``.kusto.windows.net. Type: + string (or Expression with resultType string). + :vartype endpoint: any + :ivar service_principal_id: The ID of the service principal used to authenticate against Azure Data Explorer. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The key of the service principal used to authenticate against + :vartype service_principal_id: any + :ivar service_principal_key: The key of the service principal used to authenticate against Kusto. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param database: Required. Database name for connection. Type: string (or Expression with + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar database: Required. Database name for connection. Type: string (or Expression with resultType string). - :type database: any - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype database: any + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar credential: The credential reference containing authentication information. 
+ :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -4288,6 +5894,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL + will be in the format https://:code:``.:code:``.kusto.windows.net. + Type: string (or Expression with resultType string). + :paramtype endpoint: any + :keyword service_principal_id: The ID of the service principal used to authenticate against + Azure Data Explorer. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The key of the service principal used to authenticate against + Kusto. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword database: Required. Database name for connection. Type: string (or Expression with + resultType string). + :paramtype database: any + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword credential: The credential reference containing authentication information. 
+ :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureDataExplorerLinkedService, self).__init__(**kwargs) self.type = 'AzureDataExplorer' # type: str self.endpoint = kwargs['endpoint'] @@ -4303,38 +5940,38 @@ class AzureDataExplorerSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the + :vartype disable_metrics_collection: any + :ivar ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. - :type ingestion_mapping_name: any - :param ingestion_mapping_as_json: An explicit column mapping description provided in a json + :vartype ingestion_mapping_name: any + :ivar ingestion_mapping_as_json: An explicit column mapping description provided in a json format. Type: string. - :type ingestion_mapping_as_json: any - :param flush_immediately: If set to true, any aggregation will be skipped. Default is false. + :vartype ingestion_mapping_as_json: any + :ivar flush_immediately: If set to true, any aggregation will be skipped. Default is false. Type: boolean. - :type flush_immediately: any + :vartype flush_immediately: any """ _validation = { @@ -4359,6 +5996,38 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. 
+ :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the + target Kusto table. Type: string. + :paramtype ingestion_mapping_name: any + :keyword ingestion_mapping_as_json: An explicit column mapping description provided in a json + format. Type: string. + :paramtype ingestion_mapping_as_json: any + :keyword flush_immediately: If set to true, any aggregation will be skipped. Default is false. + Type: boolean. + :paramtype flush_immediately: any + """ super(AzureDataExplorerSink, self).__init__(**kwargs) self.type = 'AzureDataExplorerSink' # type: str self.ingestion_mapping_name = kwargs.get('ingestion_mapping_name', None) @@ -4371,35 +6040,35 @@ class AzureDataExplorerSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type: + :vartype disable_metrics_collection: any + :ivar query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType string). - :type query: any - :param no_truncation: The name of the Boolean option that controls whether truncation is - applied to result-sets that go beyond a certain row-count limit. 
- :type no_truncation: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype query: any + :ivar no_truncation: The name of the Boolean option that controls whether truncation is applied + to result-sets that go beyond a certain row-count limit. + :vartype no_truncation: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -4424,6 +6093,35 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: Required. Database query. Should be a Kusto Query Language (KQL) query. 
Type: + string (or Expression with resultType string). + :paramtype query: any + :keyword no_truncation: The name of the Boolean option that controls whether truncation is + applied to result-sets that go beyond a certain row-count limit. + :paramtype no_truncation: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(AzureDataExplorerSource, self).__init__(**kwargs) self.type = 'AzureDataExplorerSource' # type: str self.query = kwargs['query'] @@ -4437,31 +6135,31 @@ class AzureDataExplorerTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table: The table name of the Azure Data Explorer database. Type: string (or Expression + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table: The table name of the Azure Data Explorer database. Type: string (or Expression with resultType string). - :type table: any + :vartype table: any """ _validation = { @@ -4486,6 +6184,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. 
+ :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table: The table name of the Azure Data Explorer database. Type: string (or Expression + with resultType string). + :paramtype table: any + """ super(AzureDataExplorerTableDataset, self).__init__(**kwargs) self.type = 'AzureDataExplorerTable' # type: str self.table = kwargs.get('table', None) @@ -4496,44 +6219,44 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param account_name: Required. The Azure Data Lake Analytics account name. Type: string (or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar account_name: Required. The Azure Data Lake Analytics account name. Type: string (or Expression with resultType string). - :type account_name: any - :param service_principal_id: The ID of the application used to authenticate against the Azure + :vartype account_name: any + :ivar service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Analytics account. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The Key of the application used to authenticate against the Azure + :vartype service_principal_id: any + :ivar service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Analytics account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. The name or ID of the tenant to which the service principal belongs. 
+ :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: Required. The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: any - :param subscription_id: Data Lake Analytics account subscription ID (if different from Data + :vartype tenant: any + :ivar subscription_id: Data Lake Analytics account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). - :type subscription_id: any - :param resource_group_name: Data Lake Analytics account resource group name (if different from + :vartype subscription_id: any + :ivar resource_group_name: Data Lake Analytics account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). - :type resource_group_name: any - :param data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string (or Expression with + :vartype resource_group_name: any + :ivar data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string (or Expression with resultType string). - :type data_lake_analytics_uri: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype data_lake_analytics_uri: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -4563,6 +6286,44 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword account_name: Required. The Azure Data Lake Analytics account name. Type: string (or + Expression with resultType string). + :paramtype account_name: any + :keyword service_principal_id: The ID of the application used to authenticate against the Azure + Data Lake Analytics account. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The Key of the application used to authenticate against the + Azure Data Lake Analytics account. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: Required. The name or ID of the tenant to which the service principal belongs. + Type: string (or Expression with resultType string). + :paramtype tenant: any + :keyword subscription_id: Data Lake Analytics account subscription ID (if different from Data + Factory account). Type: string (or Expression with resultType string). + :paramtype subscription_id: any + :keyword resource_group_name: Data Lake Analytics account resource group name (if different + from Data Factory account). Type: string (or Expression with resultType string). + :paramtype resource_group_name: any + :keyword data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string (or Expression + with resultType string). + :paramtype data_lake_analytics_uri: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. 
Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AzureDataLakeAnalyticsLinkedService, self).__init__(**kwargs) self.type = 'AzureDataLakeAnalytics' # type: str self.account_name = kwargs['account_name'] @@ -4580,39 +6341,39 @@ class AzureDataLakeStoreDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. 
- :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param folder_path: Path to the folder in the Azure Data Lake Store. Type: string (or - Expression with resultType string). - :type folder_path: any - :param file_name: The name of the file in the Azure Data Lake Store. Type: string (or - Expression with resultType string). - :type file_name: any - :param format: The format of the Data Lake Store. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the item(s) in the Azure Data Lake + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar folder_path: Path to the folder in the Azure Data Lake Store. Type: string (or Expression + with resultType string). + :vartype folder_path: any + :ivar file_name: The name of the file in the Azure Data Lake Store. Type: string (or Expression + with resultType string). + :vartype file_name: any + :ivar format: The format of the Data Lake Store. + :vartype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :ivar compression: The data compression method used for the item(s) in the Azure Data Lake Store. 
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -4640,6 +6401,39 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword folder_path: Path to the folder in the Azure Data Lake Store. Type: string (or + Expression with resultType string). + :paramtype folder_path: any + :keyword file_name: The name of the file in the Azure Data Lake Store. Type: string (or + Expression with resultType string). + :paramtype file_name: any + :keyword format: The format of the Data Lake Store. + :paramtype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :keyword compression: The data compression method used for the item(s) in the Azure Data Lake + Store. 
+ :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ super(AzureDataLakeStoreDataset, self).__init__(**kwargs) self.type = 'AzureDataLakeStoreFile' # type: str self.folder_path = kwargs.get('folder_path', None) @@ -4653,50 +6447,50 @@ class AzureDataLakeStoreLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar data_lake_store_uri: Required. Data Lake Store service URI. 
Type: string (or Expression with resultType string). - :type data_lake_store_uri: any - :param service_principal_id: The ID of the application used to authenticate against the Azure + :vartype data_lake_store_uri: any + :ivar service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Store account. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The Key of the application used to authenticate against the Azure + :vartype service_principal_id: any + :ivar service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Store account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: any - :param account_name: Data Lake Store account name. Type: string (or Expression with resultType + :vartype azure_cloud_type: any + :ivar account_name: Data Lake Store account name. Type: string (or Expression with resultType string). 
- :type account_name: any - :param subscription_id: Data Lake Store account subscription ID (if different from Data Factory + :vartype account_name: any + :ivar subscription_id: Data Lake Store account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). - :type subscription_id: any - :param resource_group_name: Data Lake Store account resource group name (if different from Data + :vartype subscription_id: any + :ivar resource_group_name: Data Lake Store account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). - :type resource_group_name: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype resource_group_name: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype encrypted_credential: any + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -4727,6 +6521,50 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or + Expression with resultType string). + :paramtype data_lake_store_uri: any + :keyword service_principal_id: The ID of the application used to authenticate against the Azure + Data Lake Store account. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The Key of the application used to authenticate against the + Azure Data Lake Store account. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. + Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is + the data factory regions’ cloud type. Type: string (or Expression with resultType string). + :paramtype azure_cloud_type: any + :keyword account_name: Data Lake Store account name. Type: string (or Expression with + resultType string). + :paramtype account_name: any + :keyword subscription_id: Data Lake Store account subscription ID (if different from Data + Factory account). Type: string (or Expression with resultType string). + :paramtype subscription_id: any + :keyword resource_group_name: Data Lake Store account resource group name (if different from + Data Factory account). Type: string (or Expression with resultType string). + :paramtype resource_group_name: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. 
Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureDataLakeStoreLinkedService, self).__init__(**kwargs) self.type = 'AzureDataLakeStore' # type: str self.data_lake_store_uri = kwargs['data_lake_store_uri'] @@ -4746,17 +6584,17 @@ class AzureDataLakeStoreLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any + :vartype file_name: any """ _validation = { @@ -4774,6 +6612,17 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). 
+ :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + """ super(AzureDataLakeStoreLocation, self).__init__(**kwargs) self.type = 'AzureDataLakeStoreLocation' # type: str @@ -4783,52 +6632,52 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: ADLS wildcardFolderPath. 
Type: string (or Expression with + :vartype recursive: any + :ivar wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype wildcard_file_name: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param list_after: Lists files after the value (exclusive) based on file/folder names’ + :vartype file_list_path: any + :ivar list_after: Lists files after the value (exclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). - :type list_after: any - :param list_before: Lists files before the value (inclusive) based on file/folder names’ + :vartype list_after: any + :ivar list_before: Lists files before the value (inclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). - :type list_before: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype list_before: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. 
+ :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any + :vartype modified_datetime_end: any """ _validation = { @@ -4857,6 +6706,52 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. 
Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or Expression with + resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType + string). + :paramtype wildcard_file_name: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword list_after: Lists files after the value (exclusive) based on file/folder names’ + lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders + under the folderPath. Type: string (or Expression with resultType string). + :paramtype list_after: any + :keyword list_before: Lists files before the value (inclusive) based on file/folder names’ + lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders + under the folderPath. Type: string (or Expression with resultType string). + :paramtype list_before: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). 
+ :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + """ super(AzureDataLakeStoreReadSettings, self).__init__(**kwargs) self.type = 'AzureDataLakeStoreReadSettings' # type: str self.recursive = kwargs.get('recursive', None) @@ -4877,33 +6772,33 @@ class AzureDataLakeStoreSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any - :param enable_adls_single_file_parallel: Single File Parallel. - :type enable_adls_single_file_parallel: any + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any + :ivar enable_adls_single_file_parallel: Single File Parallel. + :vartype enable_adls_single_file_parallel: any """ _validation = { @@ -4927,6 +6822,33 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. + :paramtype copy_behavior: any + :keyword enable_adls_single_file_parallel: Single File Parallel. + :paramtype enable_adls_single_file_parallel: any + """ super(AzureDataLakeStoreSink, self).__init__(**kwargs) self.type = 'AzureDataLakeStoreSink' # type: str self.copy_behavior = kwargs.get('copy_behavior', None) @@ -4938,26 +6860,26 @@ class AzureDataLakeStoreSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any + :vartype recursive: any """ _validation = { @@ -4978,6 +6900,26 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. 
Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + """ super(AzureDataLakeStoreSource, self).__init__(**kwargs) self.type = 'AzureDataLakeStoreSource' # type: str self.recursive = kwargs.get('recursive', None) @@ -4988,23 +6930,23 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. 
- :type copy_behavior: any - :param expiry_date_time: Specifies the expiry time of the written files. The time is applied to + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any + :ivar expiry_date_time: Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: integer (or Expression with resultType integer). - :type expiry_date_time: any + :vartype expiry_date_time: any """ _validation = { @@ -5024,6 +6966,23 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. + :paramtype copy_behavior: any + :keyword expiry_date_time: Specifies the expiry time of the written files. The time is applied + to the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: + integer (or Expression with resultType integer). + :paramtype expiry_date_time: any + """ super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs) self.type = 'AzureDataLakeStoreWriteSettings' # type: str self.expiry_date_time = kwargs.get('expiry_date_time', None) @@ -5034,46 +6993,46 @@ class AzureFileStorageLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Host name of the server. Type: string (or Expression with resultType string). - :type host: any - :param user_id: User ID to logon the server. Type: string (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Host name of the server. Type: string (or Expression with resultType string). + :vartype host: any + :ivar user_id: User ID to logon the server. Type: string (or Expression with resultType string). - :type user_id: any - :param password: Password to logon the server. 
- :type password: ~azure.mgmt.datafactory.models.SecretBase - :param connection_string: The connection string. It is mutually exclusive with sasUri property. + :vartype user_id: any + :ivar password: Password to logon the server. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure File resource. It is mutually exclusive with + :vartype connection_string: any + :ivar account_key: The Azure key vault secret reference of accountKey in connection string. + :vartype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar sas_uri: SAS URI of the Azure File resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: any - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param file_share: The azure file share name. It is required when auth with - accountKey/sasToken. Type: string (or Expression with resultType string). - :type file_share: any - :param snapshot: The azure file share snapshot version. Type: string (or Expression with + :vartype sas_uri: any + :ivar sas_token: The Azure key vault secret reference of sasToken in sas uri. + :vartype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar file_share: The azure file share name. It is required when auth with accountKey/sasToken. + Type: string (or Expression with resultType string). + :vartype file_share: any + :ivar snapshot: The azure file share snapshot version. 
Type: string (or Expression with resultType string). - :type snapshot: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype snapshot: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -5103,6 +7062,46 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Host name of the server. Type: string (or Expression with resultType string). + :paramtype host: any + :keyword user_id: User ID to logon the server. Type: string (or Expression with resultType + string). + :paramtype user_id: any + :keyword password: Password to logon the server. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword connection_string: The connection string. It is mutually exclusive with sasUri + property. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword account_key: The Azure key vault secret reference of accountKey in connection string. 
+ :paramtype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword sas_uri: SAS URI of the Azure File resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype sas_uri: any + :keyword sas_token: The Azure key vault secret reference of sasToken in sas uri. + :paramtype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword file_share: The azure file share name. It is required when auth with + accountKey/sasToken. Type: string (or Expression with resultType string). + :paramtype file_share: any + :keyword snapshot: The azure file share snapshot version. Type: string (or Expression with + resultType string). + :paramtype snapshot: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AzureFileStorageLinkedService, self).__init__(**kwargs) self.type = 'AzureFileStorage' # type: str self.host = kwargs.get('host', None) @@ -5122,17 +7121,17 @@ class AzureFileStorageLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. 
Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any + :vartype file_name: any """ _validation = { @@ -5150,6 +7149,17 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + """ super(AzureFileStorageLocation, self).__init__(**kwargs) self.type = 'AzureFileStorageLocation' # type: str @@ -5159,47 +7169,47 @@ class AzureFileStorageReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: string (or Expression + :vartype recursive: any + :ivar wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression - with resultType string). - :type wildcard_file_name: any - :param prefix: The prefix filter for the Azure File name starting from root path. Type: string + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression with + resultType string). + :vartype wildcard_file_name: any + :ivar prefix: The prefix filter for the Azure File name starting from root path. Type: string (or Expression with resultType string). - :type prefix: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype prefix: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. 
- :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype file_list_path: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any + :vartype modified_datetime_end: any """ _validation = { @@ -5227,6 +7237,47 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: string (or + Expression with resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression + with resultType string). + :paramtype wildcard_file_name: any + :keyword prefix: The prefix filter for the Azure File name starting from root path. Type: + string (or Expression with resultType string). + :paramtype prefix: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. 
Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + """ super(AzureFileStorageReadSettings, self).__init__(**kwargs) self.type = 'AzureFileStorageReadSettings' # type: str self.recursive = kwargs.get('recursive', None) @@ -5246,19 +7297,19 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any """ _validation = { @@ -5277,6 +7328,19 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. + :paramtype copy_behavior: any + """ super(AzureFileStorageWriteSettings, self).__init__(**kwargs) self.type = 'AzureFileStorageWriteSettings' # type: str @@ -5286,36 +7350,36 @@ class AzureFunctionActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param method: Required. Rest API method for target endpoint. Possible values include: "GET", + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. 
+ :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar method: Required. Rest API method for target endpoint. Possible values include: "GET", "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "TRACE". - :type method: str or ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod - :param function_name: Required. Name of the Function that the Azure Function Activity will - call. Type: string (or Expression with resultType string). - :type function_name: any - :param headers: Represents the headers that will be sent to the request. For example, to set - the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": + :vartype method: str or ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod + :ivar function_name: Required. Name of the Function that the Azure Function Activity will call. + Type: string (or Expression with resultType string). + :vartype function_name: any + :ivar headers: Represents the headers that will be sent to the request. For example, to set the + language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :type headers: any - :param body: Represents the payload that will be sent to the endpoint. 
Required for POST/PUT + :vartype headers: any + :ivar body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). - :type body: any + :vartype body: any """ _validation = { @@ -5344,6 +7408,36 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword method: Required. Rest API method for target endpoint. Possible values include: "GET", + "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "TRACE". + :paramtype method: str or ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod + :keyword function_name: Required. Name of the Function that the Azure Function Activity will + call. Type: string (or Expression with resultType string). + :paramtype function_name: any + :keyword headers: Represents the headers that will be sent to the request. For example, to set + the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": + "application/json" }. Type: string (or Expression with resultType string). + :paramtype headers: any + :keyword body: Represents the payload that will be sent to the endpoint. 
Required for POST/PUT + method, not allowed for GET method Type: string (or Expression with resultType string). + :paramtype body: any + """ super(AzureFunctionActivity, self).__init__(**kwargs) self.type = 'AzureFunctionActivity' # type: str self.method = kwargs['method'] @@ -5357,35 +7451,35 @@ class AzureFunctionLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param function_app_url: Required. The endpoint of the Azure Function App. URL will be in the + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar function_app_url: Required. 
The endpoint of the Azure Function App. URL will be in the format https://:code:``.azurewebsites.net. - :type function_app_url: any - :param function_key: Function or Host key for Azure Function App. - :type function_key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype function_app_url: any + :ivar function_key: Function or Host key for Azure Function App. + :vartype function_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference - :param resource_id: Allowed token audiences for azure function. - :type resource_id: any - :param authentication: Type of authentication (Required to specify MSI) used to connect to + :vartype encrypted_credential: any + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference + :ivar resource_id: Allowed token audiences for azure function. + :vartype resource_id: any + :ivar authentication: Type of authentication (Required to specify MSI) used to connect to AzureFunction. Type: string (or Expression with resultType string). - :type authentication: any + :vartype authentication: any """ _validation = { @@ -5412,6 +7506,35 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword function_app_url: Required. The endpoint of the Azure Function App. URL will be in the + format https://:code:``.azurewebsites.net. + :paramtype function_app_url: any + :keyword function_key: Function or Host key for Azure Function App. + :paramtype function_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + :keyword resource_id: Allowed token audiences for azure function. + :paramtype resource_id: any + :keyword authentication: Type of authentication (Required to specify MSI) used to connect to + AzureFunction. Type: string (or Expression with resultType string). + :paramtype authentication: any + """ super(AzureFunctionLinkedService, self).__init__(**kwargs) self.type = 'AzureFunction' # type: str self.function_app_url = kwargs['function_app_url'] @@ -5427,24 +7550,24 @@ class AzureKeyVaultLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param base_url: Required. The base URL of the Azure Key Vault. e.g. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar base_url: Required. The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: string (or Expression with resultType string). - :type base_url: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype base_url: any + :ivar credential: The credential reference containing authentication information. 
+ :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -5467,6 +7590,24 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword base_url: Required. The base URL of the Azure Key Vault. e.g. + https://myakv.vault.azure.net Type: string (or Expression with resultType string). + :paramtype base_url: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureKeyVaultLinkedService, self).__init__(**kwargs) self.type = 'AzureKeyVault' # type: str self.base_url = kwargs['base_url'] @@ -5481,8 +7622,8 @@ class SecretBase(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of the secret.Constant filled by server. - :type type: str + :ivar type: Required. Type of the secret.Constant filled by server. + :vartype type: str """ _validation = { @@ -5501,6 +7642,8 @@ def __init__( self, **kwargs ): + """ + """ super(SecretBase, self).__init__(**kwargs) self.type = None # type: Optional[str] @@ -5510,16 +7653,16 @@ class AzureKeyVaultSecretReference(SecretBase): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of the secret.Constant filled by server. 
- :type type: str - :param store: Required. The Azure Key Vault linked service reference. - :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or + :ivar type: Required. Type of the secret.Constant filled by server. + :vartype type: str + :ivar store: Required. The Azure Key Vault linked service reference. + :vartype store: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or Expression with resultType string). - :type secret_name: any - :param secret_version: The version of the secret in Azure Key Vault. The default value is the + :vartype secret_name: any + :ivar secret_version: The version of the secret in Azure Key Vault. The default value is the latest version of the secret. Type: string (or Expression with resultType string). - :type secret_version: any + :vartype secret_version: any """ _validation = { @@ -5539,6 +7682,16 @@ def __init__( self, **kwargs ): + """ + :keyword store: Required. The Azure Key Vault linked service reference. + :paramtype store: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or + Expression with resultType string). + :paramtype secret_name: any + :keyword secret_version: The version of the secret in Azure Key Vault. The default value is the + latest version of the secret. Type: string (or Expression with resultType string). + :paramtype secret_version: any + """ super(AzureKeyVaultSecretReference, self).__init__(**kwargs) self.type = 'AzureKeyVaultSecret' # type: str self.store = kwargs['store'] @@ -5551,28 +7704,28 @@ class AzureMariaDBLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: An ODBC connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are + :vartype connection_string: any + :ivar pwd: The Azure key vault secret reference of password in connection string. + :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -5595,6 +7748,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword pwd: The Azure key vault secret reference of password in connection string. + :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(AzureMariaDBLinkedService, self).__init__(**kwargs) self.type = 'AzureMariaDB' # type: str self.connection_string = kwargs.get('connection_string', None) @@ -5607,32 +7782,32 @@ class AzureMariaDBSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -5655,6 +7830,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(AzureMariaDBSource, self).__init__(**kwargs) self.type = 'AzureMariaDBSource' # type: str self.query = kwargs.get('query', None) @@ -5665,30 +7866,30 @@ class AzureMariaDBTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -5713,6 +7914,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. 
+ :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(AzureMariaDBTableDataset, self).__init__(**kwargs) self.type = 'AzureMariaDBTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -5723,38 +7948,37 @@ class AzureMLBatchExecutionActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. 
- :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param global_parameters: Key,Value pairs to be passed to the Azure ML Batch Execution Service + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar global_parameters: Key,Value pairs to be passed to the Azure ML Batch Execution Service endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be passed in the GlobalParameters property of the Azure ML batch execution request. - :type global_parameters: dict[str, any] - :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web + :vartype global_parameters: dict[str, any] + :ivar web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed in the WebServiceOutputs property of the Azure ML batch execution request. 
- :type web_service_outputs: dict[str, ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] - :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web - Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations.. This - information will be passed in the WebServiceInputs property of the Azure ML batch execution - request. - :type web_service_inputs: dict[str, ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] + :vartype web_service_outputs: dict[str, ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] + :ivar web_service_inputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web Service + Inputs to AzureMLWebServiceFile objects specifying the input Blob locations.. This information + will be passed in the WebServiceInputs property of the Azure ML batch execution request. + :vartype web_service_inputs: dict[str, ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] """ _validation = { @@ -5780,6 +8004,38 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword global_parameters: Key,Value pairs to be passed to the Azure ML Batch Execution + Service endpoint. 
Keys must match the names of web service parameters defined in the published + Azure ML web service. Values will be passed in the GlobalParameters property of the Azure ML + batch execution request. + :paramtype global_parameters: dict[str, any] + :keyword web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web + Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This + information will be passed in the WebServiceOutputs property of the Azure ML batch execution + request. + :paramtype web_service_outputs: dict[str, ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] + :keyword web_service_inputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web + Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations.. This + information will be passed in the WebServiceInputs property of the Azure ML batch execution + request. + :paramtype web_service_inputs: dict[str, ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] + """ super(AzureMLBatchExecutionActivity, self).__init__(**kwargs) self.type = 'AzureMLBatchExecution' # type: str self.global_parameters = kwargs.get('global_parameters', None) @@ -5792,54 +8048,53 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. 
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param ml_pipeline_id: ID of the published Azure ML pipeline. Type: string (or Expression with + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar ml_pipeline_id: ID of the published Azure ML pipeline. Type: string (or Expression with resultType string). - :type ml_pipeline_id: any - :param ml_pipeline_endpoint_id: ID of the published Azure ML pipeline endpoint. Type: string - (or Expression with resultType string). - :type ml_pipeline_endpoint_id: any - :param version: Version of the published Azure ML pipeline endpoint. Type: string (or + :vartype ml_pipeline_id: any + :ivar ml_pipeline_endpoint_id: ID of the published Azure ML pipeline endpoint. Type: string (or Expression with resultType string). - :type version: any - :param experiment_name: Run history experiment name of the pipeline run. 
This information will + :vartype ml_pipeline_endpoint_id: any + :ivar version: Version of the published Azure ML pipeline endpoint. Type: string (or Expression + with resultType string). + :vartype version: any + :ivar experiment_name: Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). - :type experiment_name: any - :param ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML pipeline + :vartype experiment_name: any + :ivar ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML pipeline endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). - :type ml_pipeline_parameters: any - :param data_path_assignments: Dictionary used for changing data path assignments without + :vartype ml_pipeline_parameters: any + :ivar data_path_assignments: Dictionary used for changing data path assignments without retraining. Values will be passed in the dataPathAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). - :type data_path_assignments: any - :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be + :vartype data_path_assignments: any + :ivar ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). - :type ml_parent_run_id: any - :param continue_on_step_failure: Whether to continue execution of other steps in the - PipelineRun if a step fails. 
This information will be passed in the continueOnStepFailure - property of the published pipeline execution request. Type: boolean (or Expression with - resultType boolean). - :type continue_on_step_failure: any + :vartype ml_parent_run_id: any + :ivar continue_on_step_failure: Whether to continue execution of other steps in the PipelineRun + if a step fails. This information will be passed in the continueOnStepFailure property of the + published pipeline execution request. Type: boolean (or Expression with resultType boolean). + :vartype continue_on_step_failure: any """ _validation = { @@ -5870,6 +8125,54 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword ml_pipeline_id: ID of the published Azure ML pipeline. Type: string (or Expression + with resultType string). + :paramtype ml_pipeline_id: any + :keyword ml_pipeline_endpoint_id: ID of the published Azure ML pipeline endpoint. Type: string + (or Expression with resultType string). + :paramtype ml_pipeline_endpoint_id: any + :keyword version: Version of the published Azure ML pipeline endpoint. Type: string (or + Expression with resultType string). 
+ :paramtype version: any + :keyword experiment_name: Run history experiment name of the pipeline run. This information + will be passed in the ExperimentName property of the published pipeline execution request. + Type: string (or Expression with resultType string). + :paramtype experiment_name: any + :keyword ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML + pipeline endpoint. Keys must match the names of pipeline parameters defined in the published + pipeline. Values will be passed in the ParameterAssignments property of the published pipeline + execution request. Type: object with key value pairs (or Expression with resultType object). + :paramtype ml_pipeline_parameters: any + :keyword data_path_assignments: Dictionary used for changing data path assignments without + retraining. Values will be passed in the dataPathAssignments property of the published pipeline + execution request. Type: object with key value pairs (or Expression with resultType object). + :paramtype data_path_assignments: any + :keyword ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will + be passed in the ParentRunId property of the published pipeline execution request. Type: string + (or Expression with resultType string). + :paramtype ml_parent_run_id: any + :keyword continue_on_step_failure: Whether to continue execution of other steps in the + PipelineRun if a step fails. This information will be passed in the continueOnStepFailure + property of the published pipeline execution request. Type: boolean (or Expression with + resultType boolean). + :paramtype continue_on_step_failure: any + """ super(AzureMLExecutePipelineActivity, self).__init__(**kwargs) self.type = 'AzureMLExecutePipeline' # type: str self.ml_pipeline_id = kwargs.get('ml_pipeline_id', None) @@ -5887,44 +8190,44 @@ class AzureMLLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). - :type ml_endpoint: any - :param api_key: Required. The API key for accessing the Azure ML model endpoint. - :type api_key: ~azure.mgmt.datafactory.models.SecretBase - :param update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web - Service endpoint. 
Type: string (or Expression with resultType string). - :type update_resource_endpoint: any - :param service_principal_id: The ID of the service principal used to authenticate against the + :vartype ml_endpoint: any + :ivar api_key: Required. The API key for accessing the Azure ML model endpoint. + :vartype api_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web Service + endpoint. Type: string (or Expression with resultType string). + :vartype update_resource_endpoint: any + :ivar service_principal_id: The ID of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The key of the service principal used to authenticate against the + :vartype service_principal_id: any + :ivar service_principal_key: The key of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any - :param authentication: Type of authentication (Required to specify MSI) used to connect to + :vartype encrypted_credential: any + :ivar authentication: Type of authentication (Required to specify MSI) used to connect to AzureML. Type: string (or Expression with resultType string). - :type authentication: any + :vartype authentication: any """ _validation = { @@ -5954,6 +8257,44 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service + endpoint. Type: string (or Expression with resultType string). + :paramtype ml_endpoint: any + :keyword api_key: Required. The API key for accessing the Azure ML model endpoint. + :paramtype api_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web + Service endpoint. Type: string (or Expression with resultType string). + :paramtype update_resource_endpoint: any + :keyword service_principal_id: The ID of the service principal used to authenticate against the + ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression + with resultType string). 
+ :paramtype service_principal_id: any + :keyword service_principal_key: The key of the service principal used to authenticate against + the ARM-based updateResourceEndpoint of an Azure ML Studio web service. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword authentication: Type of authentication (Required to specify MSI) used to connect to + AzureML. Type: string (or Expression with resultType string). + :paramtype authentication: any + """ super(AzureMLLinkedService, self).__init__(**kwargs) self.type = 'AzureML' # type: str self.ml_endpoint = kwargs['ml_endpoint'] @@ -5971,42 +8312,42 @@ class AzureMLServiceLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. 
- :type annotations: list[any] - :param subscription_id: Required. Azure ML Service workspace subscription ID. Type: string (or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar subscription_id: Required. Azure ML Service workspace subscription ID. Type: string (or Expression with resultType string). - :type subscription_id: any - :param resource_group_name: Required. Azure ML Service workspace resource group name. Type: + :vartype subscription_id: any + :ivar resource_group_name: Required. Azure ML Service workspace resource group name. Type: string (or Expression with resultType string). - :type resource_group_name: any - :param ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or - Expression with resultType string). - :type ml_workspace_name: any - :param service_principal_id: The ID of the service principal used to authenticate against the + :vartype resource_group_name: any + :ivar ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or Expression + with resultType string). + :vartype ml_workspace_name: any + :ivar service_principal_id: The ID of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType string). 
- :type service_principal_id: any - :param service_principal_key: The key of the service principal used to authenticate against the + :vartype service_principal_id: any + :ivar service_principal_key: The key of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -6036,6 +8377,42 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword subscription_id: Required. Azure ML Service workspace subscription ID. 
Type: string + (or Expression with resultType string). + :paramtype subscription_id: any + :keyword resource_group_name: Required. Azure ML Service workspace resource group name. Type: + string (or Expression with resultType string). + :paramtype resource_group_name: any + :keyword ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or + Expression with resultType string). + :paramtype ml_workspace_name: any + :keyword service_principal_id: The ID of the service principal used to authenticate against the + endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType + string). + :paramtype service_principal_id: any + :keyword service_principal_key: The key of the service principal used to authenticate against + the endpoint of a published Azure ML Service pipeline. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AzureMLServiceLinkedService, self).__init__(**kwargs) self.type = 'AzureMLService' # type: str self.subscription_id = kwargs['subscription_id'] @@ -6052,33 +8429,34 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. 
- :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param trained_model_name: Required. Name of the Trained Model module in the Web Service + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar trained_model_name: Required. Name of the Trained Model module in the Web Service experiment to be updated. Type: string (or Expression with resultType string). - :type trained_model_name: any - :param trained_model_linked_service_name: Required. Name of Azure Storage linked service - holding the .ilearner file that will be uploaded by the update operation. - :type trained_model_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param trained_model_file_path: Required. 
The relative file path in trainedModelLinkedService - to represent the .ilearner file that will be uploaded by the update operation. Type: string - (or Expression with resultType string). - :type trained_model_file_path: any + :vartype trained_model_name: any + :ivar trained_model_linked_service_name: Required. Name of Azure Storage linked service holding + the .ilearner file that will be uploaded by the update operation. + :vartype trained_model_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar trained_model_file_path: Required. The relative file path in trainedModelLinkedService to + represent the .ilearner file that will be uploaded by the update operation. Type: string (or + Expression with resultType string). + :vartype trained_model_file_path: any """ _validation = { @@ -6107,6 +8485,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword trained_model_name: Required. Name of the Trained Model module in the Web Service + experiment to be updated. Type: string (or Expression with resultType string). + :paramtype trained_model_name: any + :keyword trained_model_linked_service_name: Required. 
Name of Azure Storage linked service + holding the .ilearner file that will be uploaded by the update operation. + :paramtype trained_model_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword trained_model_file_path: Required. The relative file path in trainedModelLinkedService + to represent the .ilearner file that will be uploaded by the update operation. Type: string + (or Expression with resultType string). + :paramtype trained_model_file_path: any + """ super(AzureMLUpdateResourceActivity, self).__init__(**kwargs) self.type = 'AzureMLUpdateResource' # type: str self.trained_model_name = kwargs['trained_model_name'] @@ -6119,12 +8525,12 @@ class AzureMLWebServiceFile(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param file_path: Required. The relative file path, including container name, in the Azure Blob + :ivar file_path: Required. The relative file path, including container name, in the Azure Blob Storage specified by the LinkedService. Type: string (or Expression with resultType string). - :type file_path: any - :param linked_service_name: Required. Reference to an Azure Storage LinkedService, where Azure + :vartype file_path: any + :ivar linked_service_name: Required. Reference to an Azure Storage LinkedService, where Azure ML WebService Input/Output file located. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { @@ -6141,6 +8547,15 @@ def __init__( self, **kwargs ): + """ + :keyword file_path: Required. The relative file path, including container name, in the Azure + Blob Storage specified by the LinkedService. Type: string (or Expression with resultType + string). + :paramtype file_path: any + :keyword linked_service_name: Required. 
Reference to an Azure Storage LinkedService, where + Azure ML WebService Input/Output file located. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + """ super(AzureMLWebServiceFile, self).__init__(**kwargs) self.file_path = kwargs['file_path'] self.linked_service_name = kwargs['linked_service_name'] @@ -6151,28 +8566,28 @@ class AzureMySqlLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. 
+ :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -6196,6 +8611,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. 
+ :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AzureMySqlLinkedService, self).__init__(**kwargs) self.type = 'AzureMySql' # type: str self.connection_string = kwargs['connection_string'] @@ -6208,32 +8645,32 @@ class AzureMySqlSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param pre_copy_script: A query to execute before starting the copy. Type: string (or - Expression with resultType string). - :type pre_copy_script: any + :vartype disable_metrics_collection: any + :ivar pre_copy_script: A query to execute before starting the copy. Type: string (or Expression + with resultType string). + :vartype pre_copy_script: any """ _validation = { @@ -6256,6 +8693,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). + :paramtype pre_copy_script: any + """ super(AzureMySqlSink, self).__init__(**kwargs) self.type = 'AzureMySqlSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) @@ -6266,31 +8729,31 @@ class AzureMySqlSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any + :vartype additional_columns: any + :ivar query: Database query. Type: string (or Expression with resultType string). + :vartype query: any """ _validation = { @@ -6313,6 +8776,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: any + """ super(AzureMySqlSource, self).__init__(**kwargs) self.type = 'AzureMySqlSource' # type: str self.query = kwargs.get('query', None) @@ -6323,34 +8811,34 @@ class AzureMySqlTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The Azure MySQL database table name. 
Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The Azure MySQL database table name. Type: string (or Expression with resultType string). - :type table_name: any - :param table: The name of Azure MySQL database table. Type: string (or Expression with + :vartype table_name: any + :ivar table: The name of Azure MySQL database table. Type: string (or Expression with resultType string). - :type table: any + :vartype table: any """ _validation = { @@ -6376,6 +8864,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The Azure MySQL database table name. Type: string (or Expression with + resultType string). + :paramtype table_name: any + :keyword table: The name of Azure MySQL database table. 
Type: string (or Expression with + resultType string). + :paramtype table: any + """ super(AzureMySqlTableDataset, self).__init__(**kwargs) self.type = 'AzureMySqlTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -6387,28 +8903,28 @@ class AzurePostgreSqlLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: An ODBC connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
- :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -6431,6 +8947,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. + :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. 
Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AzurePostgreSqlLinkedService, self).__init__(**kwargs) self.type = 'AzurePostgreSql' # type: str self.connection_string = kwargs.get('connection_string', None) @@ -6443,32 +8981,32 @@ class AzurePostgreSqlSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param pre_copy_script: A query to execute before starting the copy. Type: string (or - Expression with resultType string). - :type pre_copy_script: any + :vartype disable_metrics_collection: any + :ivar pre_copy_script: A query to execute before starting the copy. Type: string (or Expression + with resultType string). + :vartype pre_copy_script: any """ _validation = { @@ -6491,6 +9029,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). + :paramtype pre_copy_script: any + """ super(AzurePostgreSqlSink, self).__init__(**kwargs) self.type = 'AzurePostgreSqlSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) @@ -6501,32 +9065,32 @@ class AzurePostgreSqlSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -6549,6 +9113,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(AzurePostgreSqlSource, self).__init__(**kwargs) self.type = 'AzurePostgreSqlSource' # type: str self.query = kwargs.get('query', None) @@ -6559,37 +9149,37 @@ class AzurePostgreSqlTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. 
- :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name of the Azure PostgreSQL database which includes both schema + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name of the Azure PostgreSQL database which includes both schema and table. Type: string (or Expression with resultType string). - :type table_name: any - :param table: The table name of the Azure PostgreSQL database. Type: string (or Expression with + :vartype table_name: any + :ivar table: The table name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: + :vartype table: any + :ivar schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -6616,6 +9206,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name of the Azure PostgreSQL database which includes both schema + and table. Type: string (or Expression with resultType string). + :paramtype table_name: any + :keyword table: The table name of the Azure PostgreSQL database. Type: string (or Expression + with resultType string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: + string (or Expression with resultType string). + :paramtype schema_type_properties_schema: any + """ super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) self.type = 'AzurePostgreSqlTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -6628,29 +9249,29 @@ class AzureQueueSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any + :vartype disable_metrics_collection: any """ _validation = { @@ -6672,6 +9293,29 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + """ super(AzureQueueSink, self).__init__(**kwargs) self.type = 'AzureQueueSink' # type: str @@ -6681,31 +9325,31 @@ class AzureSearchIndexDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param index_name: Required. The name of the Azure Search Index. Type: string (or Expression + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar index_name: Required. The name of the Azure Search Index. Type: string (or Expression with resultType string). - :type index_name: any + :vartype index_name: any """ _validation = { @@ -6731,6 +9375,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword index_name: Required. The name of the Azure Search Index. Type: string (or Expression + with resultType string). + :paramtype index_name: any + """ super(AzureSearchIndexDataset, self).__init__(**kwargs) self.type = 'AzureSearchIndex' # type: str self.index_name = kwargs['index_name'] @@ -6741,32 +9410,33 @@ class AzureSearchIndexSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. 
Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any - :param write_behavior: Specify the write behavior when upserting documents into Azure Search + :vartype disable_metrics_collection: any + :ivar write_behavior: Specify the write behavior when upserting documents into Azure Search Index. Possible values include: "Merge", "Upload". - :type write_behavior: str or ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType + :vartype write_behavior: str or + ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType """ _validation = { @@ -6789,6 +9459,33 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: Specify the write behavior when upserting documents into Azure Search + Index. 
Possible values include: "Merge", "Upload". + :paramtype write_behavior: str or + ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType + """ super(AzureSearchIndexSink, self).__init__(**kwargs) self.type = 'AzureSearchIndexSink' # type: str self.write_behavior = kwargs.get('write_behavior', None) @@ -6799,28 +9496,28 @@ class AzureSearchLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param url: Required. URL for Azure Search service. Type: string (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar url: Required. 
URL for Azure Search service. Type: string (or Expression with resultType string). - :type url: any - :param key: Admin Key for Azure Search service. - :type key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype url: any + :ivar key: Admin Key for Azure Search service. + :vartype key: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -6844,6 +9541,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword url: Required. URL for Azure Search service. Type: string (or Expression with + resultType string). + :paramtype url: any + :keyword key: Admin Key for Azure Search service. + :paramtype key: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(AzureSearchLinkedService, self).__init__(**kwargs) self.type = 'AzureSearch' # type: str self.url = kwargs['url'] @@ -6856,45 +9575,45 @@ class AzureSqlDatabaseLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
- :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar service_principal_id: The ID of the service principal used to authenticate against Azure SQL Database. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The key of the service principal used to authenticate against + :vartype service_principal_id: any + :ivar service_principal_key: The key of the service principal used to authenticate against Azure SQL Database. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: any - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are + :vartype azure_cloud_type: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param always_encrypted_settings: Sql always encrypted properties. - :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype encrypted_credential: any + :ivar always_encrypted_settings: Sql always encrypted properties. + :vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -6924,6 +9643,46 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. 
+ :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword service_principal_id: The ID of the service principal used to authenticate against + Azure SQL Database. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The key of the service principal used to authenticate against + Azure SQL Database. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. + Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is + the data factory regions’ cloud type. Type: string (or Expression with resultType string). + :paramtype azure_cloud_type: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword always_encrypted_settings: Sql always encrypted properties. + :paramtype always_encrypted_settings: + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureSqlDatabaseLinkedService, self).__init__(**kwargs) self.type = 'AzureSqlDatabase' # type: str self.connection_string = kwargs['connection_string'] @@ -6942,43 +9701,43 @@ class AzureSqlDWLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. 
- :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar service_principal_id: The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The key of the service principal used to authenticate against + :vartype service_principal_id: any + :ivar service_principal_key: The key of the service principal used to authenticate against Azure SQL Data Warehouse. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype azure_cloud_type: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype encrypted_credential: any + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -7007,6 +9766,43 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. + :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword service_principal_id: The ID of the service principal used to authenticate against + Azure SQL Data Warehouse. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The key of the service principal used to authenticate against + Azure SQL Data Warehouse. 
+ :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. + Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is + the data factory regions’ cloud type. Type: string (or Expression with resultType string). + :paramtype azure_cloud_type: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureSqlDWLinkedService, self).__init__(**kwargs) self.type = 'AzureSqlDW' # type: str self.connection_string = kwargs['connection_string'] @@ -7024,37 +9820,37 @@ class AzureSqlDWTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. 
+ :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. 
Type: + :vartype table_name: any + :ivar schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression with + :vartype schema_type_properties_schema: any + :ivar table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :type table: any + :vartype table: any """ _validation = { @@ -7081,6 +9877,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. 
+ :paramtype table_name: any + :keyword schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. Type: + string (or Expression with resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression + with resultType string). + :paramtype table: any + """ super(AzureSqlDWTableDataset, self).__init__(**kwargs) self.type = 'AzureSqlDWTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -7093,45 +9920,45 @@ class AzureSqlMILinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar service_principal_id: The ID of the service principal used to authenticate against Azure SQL Managed Instance. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The key of the service principal used to authenticate against + :vartype service_principal_id: any + :ivar service_principal_key: The key of the service principal used to authenticate against Azure SQL Managed Instance. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. 
Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype azure_cloud_type: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param always_encrypted_settings: Sql always encrypted properties. - :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype encrypted_credential: any + :ivar always_encrypted_settings: Sql always encrypted properties. + :vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -7161,6 +9988,46 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. 
+ :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. + :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword service_principal_id: The ID of the service principal used to authenticate against + Azure SQL Managed Instance. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The key of the service principal used to authenticate against + Azure SQL Managed Instance. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. + Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is + the data factory regions’ cloud type. Type: string (or Expression with resultType string). + :paramtype azure_cloud_type: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword always_encrypted_settings: Sql always encrypted properties. + :paramtype always_encrypted_settings: + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :keyword credential: The credential reference containing authentication information. 
+ :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureSqlMILinkedService, self).__init__(**kwargs) self.type = 'AzureSqlMI' # type: str self.connection_string = kwargs['connection_string'] @@ -7179,37 +10046,37 @@ class AzureSqlMITableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. Type: + :vartype table_name: any + :ivar schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The table name of the Azure SQL Managed Instance dataset. Type: string (or + :vartype schema_type_properties_schema: any + :ivar table: The table name of the Azure SQL Managed Instance dataset. Type: string (or Expression with resultType string). - :type table: any + :vartype table: any """ _validation = { @@ -7236,6 +10103,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. 
Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. + Type: string (or Expression with resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The table name of the Azure SQL Managed Instance dataset. Type: string (or + Expression with resultType string). + :paramtype table: any + """ super(AzureSqlMITableDataset, self).__init__(**kwargs) self.type = 'AzureSqlMITable' # type: str self.table_name = kwargs.get('table_name', None) @@ -7248,55 +10146,55 @@ class AzureSqlSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. 
- :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + :vartype disable_metrics_collection: any + :ivar sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: any - :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with - resultType string). - :type sql_writer_table_type: any - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + :vartype sql_writer_stored_procedure_name: any + :ivar sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType + string). + :vartype sql_writer_table_type: any + :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: any - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, + :vartype pre_copy_script: any + :ivar stored_procedure_parameters: SQL stored procedure parameters. + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + :ivar stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :type stored_procedure_table_type_parameter_name: any - :param table_option: The option to handle sink table, such as autoCreate. For now only + :vartype stored_procedure_table_type_parameter_name: any + :ivar table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: any - :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. 
Type: boolean (or + :vartype table_option: any + :ivar sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :type sql_writer_use_table_lock: any - :param write_behavior: Write behavior when copying data into Azure SQL. Type: + :vartype sql_writer_use_table_lock: any + :ivar write_behavior: Write behavior when copying data into Azure SQL. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). - :type write_behavior: any - :param upsert_settings: SQL upsert settings. - :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings + :vartype write_behavior: any + :ivar upsert_settings: SQL upsert settings. + :vartype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ _validation = { @@ -7327,6 +10225,55 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :paramtype sql_writer_stored_procedure_name: any + :keyword sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :paramtype sql_writer_table_type: any + :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :paramtype pre_copy_script: any + :keyword stored_procedure_parameters: SQL stored procedure parameters. + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :keyword stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :paramtype stored_procedure_table_type_parameter_name: any + :keyword table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :paramtype table_option: any + :keyword sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean + (or Expression with resultType boolean). + :paramtype sql_writer_use_table_lock: any + :keyword write_behavior: Write behavior when copying data into Azure SQL. Type: + SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :paramtype write_behavior: any + :keyword upsert_settings: SQL upsert settings. 
+ :paramtype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings + """ super(AzureSqlSink, self).__init__(**kwargs) self.type = 'AzureSqlSink' # type: str self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) @@ -7345,46 +10292,46 @@ class AzureSqlSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: any - :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database - source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression - with resultType string). - :type sql_reader_stored_procedure_name: any - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + :vartype additional_columns: any + :ivar sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :vartype sql_reader_query: any + :ivar sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. + This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with + resultType string). + :vartype sql_reader_stored_procedure_name: any + :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
- :type stored_procedure_parameters: dict[str, + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: any - :param partition_option: The partition mechanism that will be used for Sql read in parallel. + :ivar produce_additional_types: Which additional types to produce. + :vartype produce_additional_types: any + :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. + :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -7412,6 +10359,47 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword sql_reader_query: SQL reader query. Type: string (or Expression with resultType + string). + :paramtype sql_reader_query: any + :keyword sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :paramtype sql_reader_stored_procedure_name: any + :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :keyword produce_additional_types: Which additional types to produce. + :paramtype produce_additional_types: any + :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. + :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + """ super(AzureSqlSource, self).__init__(**kwargs) self.type = 'AzureSqlSource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) @@ -7427,37 +10415,37 @@ class AzureSqlTableDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param schema_type_properties_schema: The schema name of the Azure SQL database. Type: string + :vartype table_name: any + :ivar schema_type_properties_schema: The schema name of the Azure SQL database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The table name of the Azure SQL database. Type: string (or Expression with + :vartype schema_type_properties_schema: any + :ivar table: The table name of the Azure SQL database. Type: string (or Expression with resultType string). - :type table: any + :vartype table: any """ _validation = { @@ -7484,6 +10472,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. 
Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword schema_type_properties_schema: The schema name of the Azure SQL database. Type: string + (or Expression with resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The table name of the Azure SQL database. Type: string (or Expression with + resultType string). + :paramtype table: any + """ super(AzureSqlTableDataset, self).__init__(**kwargs) self.type = 'AzureSqlTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -7496,33 +10515,33 @@ class AzureStorageLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: The connection string. It is mutually exclusive with sasUri property. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + :vartype connection_string: any + :ivar account_key: The Azure key vault secret reference of accountKey in connection string. + :vartype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: any - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. 
- :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype sas_uri: any + :ivar sas_token: The Azure key vault secret reference of sasToken in sas uri. + :vartype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: str + :vartype encrypted_credential: str """ _validation = { @@ -7547,6 +10566,33 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: The connection string. It is mutually exclusive with sasUri + property. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword account_key: The Azure key vault secret reference of accountKey in connection string. + :paramtype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. 
+ :paramtype sas_uri: any + :keyword sas_token: The Azure key vault secret reference of sasToken in sas uri. + :paramtype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: str + """ super(AzureStorageLinkedService, self).__init__(**kwargs) self.type = 'AzureStorage' # type: str self.connection_string = kwargs.get('connection_string', None) @@ -7561,31 +10607,31 @@ class AzureTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: Required. The table name of the Azure Table storage. Type: string (or + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: Required. The table name of the Azure Table storage. Type: string (or Expression with resultType string). - :type table_name: any + :vartype table_name: any """ _validation = { @@ -7611,6 +10657,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. 
Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: Required. The table name of the Azure Table storage. Type: string (or + Expression with resultType string). + :paramtype table_name: any + """ super(AzureTableDataset, self).__init__(**kwargs) self.type = 'AzureTable' # type: str self.table_name = kwargs['table_name'] @@ -7621,41 +10692,41 @@ class AzureTableSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param azure_table_default_partition_key_value: Azure Table default partition key value. Type: + :vartype disable_metrics_collection: any + :ivar azure_table_default_partition_key_value: Azure Table default partition key value. Type: string (or Expression with resultType string). - :type azure_table_default_partition_key_value: any - :param azure_table_partition_key_name: Azure Table partition key name. Type: string (or + :vartype azure_table_default_partition_key_value: any + :ivar azure_table_partition_key_name: Azure Table partition key name. Type: string (or Expression with resultType string). 
- :type azure_table_partition_key_name: any - :param azure_table_row_key_name: Azure Table row key name. Type: string (or Expression with + :vartype azure_table_partition_key_name: any + :ivar azure_table_row_key_name: Azure Table row key name. Type: string (or Expression with resultType string). - :type azure_table_row_key_name: any - :param azure_table_insert_type: Azure Table insert type. Type: string (or Expression with + :vartype azure_table_row_key_name: any + :ivar azure_table_insert_type: Azure Table insert type. Type: string (or Expression with resultType string). - :type azure_table_insert_type: any + :vartype azure_table_insert_type: any """ _validation = { @@ -7681,6 +10752,41 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype disable_metrics_collection: any + :keyword azure_table_default_partition_key_value: Azure Table default partition key value. + Type: string (or Expression with resultType string). + :paramtype azure_table_default_partition_key_value: any + :keyword azure_table_partition_key_name: Azure Table partition key name. Type: string (or + Expression with resultType string). + :paramtype azure_table_partition_key_name: any + :keyword azure_table_row_key_name: Azure Table row key name. Type: string (or Expression with + resultType string). + :paramtype azure_table_row_key_name: any + :keyword azure_table_insert_type: Azure Table insert type. Type: string (or Expression with + resultType string). + :paramtype azure_table_insert_type: any + """ super(AzureTableSink, self).__init__(**kwargs) self.type = 'AzureTableSink' # type: str self.azure_table_default_partition_key_value = kwargs.get('azure_table_default_partition_key_value', None) @@ -7694,35 +10800,35 @@ class AzureTableSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param azure_table_source_query: Azure Table source query. Type: string (or Expression with + :vartype additional_columns: any + :ivar azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). - :type azure_table_source_query: any - :param azure_table_source_ignore_table_not_found: Azure Table source ignore table not found. + :vartype azure_table_source_query: any + :ivar azure_table_source_ignore_table_not_found: Azure Table source ignore table not found. 
Type: boolean (or Expression with resultType boolean). - :type azure_table_source_ignore_table_not_found: any + :vartype azure_table_source_ignore_table_not_found: any """ _validation = { @@ -7746,6 +10852,35 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword azure_table_source_query: Azure Table source query. Type: string (or Expression with + resultType string). + :paramtype azure_table_source_query: any + :keyword azure_table_source_ignore_table_not_found: Azure Table source ignore table not found. + Type: boolean (or Expression with resultType boolean). 
+ :paramtype azure_table_source_ignore_table_not_found: any + """ super(AzureTableSource, self).__init__(**kwargs) self.type = 'AzureTableSource' # type: str self.azure_table_source_query = kwargs.get('azure_table_source_query', None) @@ -7757,33 +10892,33 @@ class AzureTableStorageLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: The connection string. It is mutually exclusive with sasUri property. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: The connection string. 
It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + :vartype connection_string: any + :ivar account_key: The Azure key vault secret reference of accountKey in connection string. + :vartype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: any - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype sas_uri: any + :ivar sas_token: The Azure key vault secret reference of sasToken in sas uri. + :vartype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: str + :vartype encrypted_credential: str """ _validation = { @@ -7808,6 +10943,33 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. 
+ :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: The connection string. It is mutually exclusive with sasUri + property. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword account_key: The Azure key vault secret reference of accountKey in connection string. + :paramtype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype sas_uri: any + :keyword sas_token: The Azure key vault secret reference of sasToken in sas uri. + :paramtype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: str + """ super(AzureTableStorageLinkedService, self).__init__(**kwargs) self.type = 'AzureTableStorage' # type: str self.connection_string = kwargs.get('connection_string', None) @@ -7822,32 +10984,32 @@ class BinaryDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the Binary storage. 
- :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param compression: The data compression method used for the binary dataset. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar location: The location of the Binary storage. + :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation + :ivar compression: The data compression method used for the binary dataset. + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -7873,6 +11035,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword location: The location of the Binary storage. 
+ :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation + :keyword compression: The data compression method used for the binary dataset. + :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ super(BinaryDataset, self).__init__(**kwargs) self.type = 'Binary' # type: str self.location = kwargs.get('location', None) @@ -7887,11 +11075,11 @@ class FormatReadSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str """ _validation = { @@ -7911,6 +11099,11 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(FormatReadSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = 'FormatReadSettings' # type: str @@ -7921,13 +11114,13 @@ class BinaryReadSettings(FormatReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param compression_properties: Compression settings. 
- :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar compression_properties: Compression settings. + :vartype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ _validation = { @@ -7944,6 +11137,13 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword compression_properties: Compression settings. + :paramtype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + """ super(BinaryReadSettings, self).__init__(**kwargs) self.type = 'BinaryReadSettings' # type: str self.compression_properties = kwargs.get('compression_properties', None) @@ -7954,31 +11154,31 @@ class BinarySink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Binary store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :vartype disable_metrics_collection: any + :ivar store_settings: Binary store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings """ _validation = { @@ -8001,6 +11201,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. 
Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: Binary store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + """ super(BinarySink, self).__init__(**kwargs) self.type = 'BinarySink' # type: str self.store_settings = kwargs.get('store_settings', None) @@ -8011,27 +11236,27 @@ class BinarySource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. 
Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Binary store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: Binary format settings. - :type format_settings: ~azure.mgmt.datafactory.models.BinaryReadSettings + :vartype disable_metrics_collection: any + :ivar store_settings: Binary store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar format_settings: Binary format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.BinaryReadSettings """ _validation = { @@ -8053,6 +11278,27 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: Binary store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword format_settings: Binary format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.BinaryReadSettings + """ super(BinarySource, self).__init__(**kwargs) self.type = 'BinarySource' # type: str self.store_settings = kwargs.get('store_settings', None) @@ -8069,18 +11315,18 @@ class Trigger(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Trigger type.Constant filled by server. + :vartype type: str + :ivar description: Trigger description. 
+ :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[any] + :ivar annotations: List of tags that can be used for describing the trigger. + :vartype annotations: list[any] """ _validation = { @@ -8104,6 +11350,15 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Trigger description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the trigger. + :paramtype annotations: list[any] + """ super(Trigger, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = 'Trigger' # type: str @@ -8122,20 +11377,20 @@ class MultiplePipelineTrigger(Trigger): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Trigger type.Constant filled by server. + :vartype type: str + :ivar description: Trigger description. + :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. 
Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[any] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :ivar annotations: List of tags that can be used for describing the trigger. + :vartype annotations: list[any] + :ivar pipelines: Pipelines that need to be started. + :vartype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] """ _validation = { @@ -8160,6 +11415,17 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Trigger description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the trigger. + :paramtype annotations: list[any] + :keyword pipelines: Pipelines that need to be started. + :paramtype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + """ super(MultiplePipelineTrigger, self).__init__(**kwargs) self.type = 'MultiplePipelineTrigger' # type: str self.pipelines = kwargs.get('pipelines', None) @@ -8172,35 +11438,35 @@ class BlobEventsTrigger(MultiplePipelineTrigger): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. 
Trigger type.Constant filled by server. + :vartype type: str + :ivar description: Trigger description. + :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[any] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param blob_path_begins_with: The blob path must begin with the pattern provided for trigger to + :ivar annotations: List of tags that can be used for describing the trigger. + :vartype annotations: list[any] + :ivar pipelines: Pipelines that need to be started. + :vartype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :ivar blob_path_begins_with: The blob path must begin with the pattern provided for trigger to fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the records container. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. - :type blob_path_begins_with: str - :param blob_path_ends_with: The blob path must end with the pattern provided for trigger to + :vartype blob_path_begins_with: str + :ivar blob_path_ends_with: The blob path must end with the pattern provided for trigger to fire. For example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. - :type blob_path_ends_with: str - :param ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. - :type ignore_empty_blobs: bool - :param events: Required. The type of events that cause this trigger to fire. 
- :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] - :param scope: Required. The ARM resource ID of the Storage Account. - :type scope: str + :vartype blob_path_ends_with: str + :ivar ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. + :vartype ignore_empty_blobs: bool + :ivar events: Required. The type of events that cause this trigger to fire. + :vartype events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] + :ivar scope: Required. The ARM resource ID of the Storage Account. + :vartype scope: str """ _validation = { @@ -8228,6 +11494,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Trigger description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the trigger. + :paramtype annotations: list[any] + :keyword pipelines: Pipelines that need to be started. + :paramtype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :keyword blob_path_begins_with: The blob path must begin with the pattern provided for trigger + to fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the + december folder under the records container. At least one of these must be provided: + blobPathBeginsWith, blobPathEndsWith. + :paramtype blob_path_begins_with: str + :keyword blob_path_ends_with: The blob path must end with the pattern provided for trigger to + fire. For example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a + december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + :paramtype blob_path_ends_with: str + :keyword ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. + :paramtype ignore_empty_blobs: bool + :keyword events: Required. 
The type of events that cause this trigger to fire. + :paramtype events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] + :keyword scope: Required. The ARM resource ID of the Storage Account. + :paramtype scope: str + """ super(BlobEventsTrigger, self).__init__(**kwargs) self.type = 'BlobEventsTrigger' # type: str self.blob_path_begins_with = kwargs.get('blob_path_begins_with', None) @@ -8242,43 +11534,43 @@ class BlobSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression + :vartype disable_metrics_collection: any + :ivar blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). - :type blob_writer_overwrite_files: any - :param blob_writer_date_time_format: Blob writer date time format. Type: string (or Expression + :vartype blob_writer_overwrite_files: any + :ivar blob_writer_date_time_format: Blob writer date time format. Type: string (or Expression with resultType string). - :type blob_writer_date_time_format: any - :param blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with + :vartype blob_writer_date_time_format: any + :ivar blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with resultType boolean). - :type blob_writer_add_header: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any - :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + :vartype blob_writer_add_header: any + :ivar copy_behavior: The type of copy behavior for copy sink. 
+ :vartype copy_behavior: any + :ivar metadata: Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). - :type metadata: list[~azure.mgmt.datafactory.models.MetadataItem] + :vartype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ _validation = { @@ -8305,6 +11597,43 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression + with resultType boolean). + :paramtype blob_writer_overwrite_files: any + :keyword blob_writer_date_time_format: Blob writer date time format. Type: string (or + Expression with resultType string). 
+ :paramtype blob_writer_date_time_format: any + :keyword blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with + resultType boolean). + :paramtype blob_writer_add_header: any + :keyword copy_behavior: The type of copy behavior for copy sink. + :paramtype copy_behavior: any + :keyword metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :paramtype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] + """ super(BlobSink, self).__init__(**kwargs) self.type = 'BlobSink' # type: str self.blob_writer_overwrite_files = kwargs.get('blob_writer_overwrite_files', None) @@ -8319,32 +11648,32 @@ class BlobSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType + :vartype disable_metrics_collection: any + :ivar treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). - :type treat_empty_as_null: any - :param skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or + :vartype treat_empty_as_null: any + :ivar skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). - :type skip_header_line_count: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype skip_header_line_count: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any + :vartype recursive: any """ _validation = { @@ -8367,6 +11696,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType + boolean). + :paramtype treat_empty_as_null: any + :keyword skip_header_line_count: Number of header lines to skip from each blob. Type: integer + (or Expression with resultType integer). + :paramtype skip_header_line_count: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + """ super(BlobSource, self).__init__(**kwargs) self.type = 'BlobSource' # type: str self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) @@ -8381,27 +11736,27 @@ class BlobTrigger(MultiplePipelineTrigger): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Trigger type.Constant filled by server. 
+ :vartype type: str + :ivar description: Trigger description. + :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[any] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param folder_path: Required. The path of the container/folder that will trigger the pipeline. - :type folder_path: str - :param max_concurrency: Required. The max number of parallel files to handle when it is + :ivar annotations: List of tags that can be used for describing the trigger. + :vartype annotations: list[any] + :ivar pipelines: Pipelines that need to be started. + :vartype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :ivar folder_path: Required. The path of the container/folder that will trigger the pipeline. + :vartype folder_path: str + :ivar max_concurrency: Required. The max number of parallel files to handle when it is triggered. - :type max_concurrency: int - :param linked_service: Required. The Azure Storage linked service reference. - :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :vartype max_concurrency: int + :ivar linked_service: Required. The Azure Storage linked service reference. + :vartype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { @@ -8428,6 +11783,25 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Trigger description. 
+ :paramtype description: str + :keyword annotations: List of tags that can be used for describing the trigger. + :paramtype annotations: list[any] + :keyword pipelines: Pipelines that need to be started. + :paramtype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :keyword folder_path: Required. The path of the container/folder that will trigger the + pipeline. + :paramtype folder_path: str + :keyword max_concurrency: Required. The max number of parallel files to handle when it is + triggered. + :paramtype max_concurrency: int + :keyword linked_service: Required. The Azure Storage linked service reference. + :paramtype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + """ super(BlobTrigger, self).__init__(**kwargs) self.type = 'BlobTrigger' # type: str self.folder_path = kwargs['folder_path'] @@ -8440,37 +11814,36 @@ class CassandraLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. Host name for connection. Type: string (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. 
Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. Host name for connection. Type: string (or Expression with resultType string). - :type host: any - :param authentication_type: AuthenticationType to be used for connection. Type: string (or + :vartype host: any + :ivar authentication_type: AuthenticationType to be used for connection. Type: string (or Expression with resultType string). - :type authentication_type: any - :param port: The port for the connection. Type: integer (or Expression with resultType - integer). - :type port: any - :param username: Username for authentication. Type: string (or Expression with resultType + :vartype authentication_type: any + :ivar port: The port for the connection. Type: integer (or Expression with resultType integer). + :vartype port: any + :ivar username: Username for authentication. Type: string (or Expression with resultType string). - :type username: any - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype username: any + :ivar password: Password for authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -8497,6 +11870,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. Host name for connection. Type: string (or Expression with resultType + string). + :paramtype host: any + :keyword authentication_type: AuthenticationType to be used for connection. Type: string (or + Expression with resultType string). + :paramtype authentication_type: any + :keyword port: The port for the connection. Type: integer (or Expression with resultType + integer). + :paramtype port: any + :keyword username: Username for authentication. Type: string (or Expression with resultType + string). + :paramtype username: any + :keyword password: Password for authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(CassandraLinkedService, self).__init__(**kwargs) self.type = 'Cassandra' # type: str self.host = kwargs['host'] @@ -8512,39 +11916,39 @@ class CassandraSource(TabularSource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language + :vartype additional_columns: any + :ivar query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). - :type query: any - :param consistency_level: The consistency level specifies how many Cassandra servers must + :vartype query: any + :ivar consistency_level: The consistency level specifies how many Cassandra servers must respond to a read request before returning data to the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. Possible values include: "ALL", "EACH_QUORUM", "QUORUM", "LOCAL_QUORUM", "ONE", "TWO", "THREE", "LOCAL_ONE", "SERIAL", "LOCAL_SERIAL". - :type consistency_level: str or + :vartype consistency_level: str or ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels """ @@ -8569,6 +11973,40 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Should be a SQL-92 query expression or Cassandra Query Language + (CQL) command. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword consistency_level: The consistency level specifies how many Cassandra servers must + respond to a read request before returning data to the client application. Cassandra checks the + specified number of Cassandra servers for data to satisfy the read request. Must be one of + cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. + Possible values include: "ALL", "EACH_QUORUM", "QUORUM", "LOCAL_QUORUM", "ONE", "TWO", "THREE", + "LOCAL_ONE", "SERIAL", "LOCAL_SERIAL". + :paramtype consistency_level: str or + ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels + """ super(CassandraSource, self).__init__(**kwargs) self.type = 'CassandraSource' # type: str self.query = kwargs.get('query', None) @@ -8580,34 +12018,34 @@ class CassandraTableDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name of the Cassandra database. Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name of the Cassandra database. Type: string (or Expression with resultType string). - :type table_name: any - :param keyspace: The keyspace of the Cassandra database. Type: string (or Expression with + :vartype table_name: any + :ivar keyspace: The keyspace of the Cassandra database. Type: string (or Expression with resultType string). - :type keyspace: any + :vartype keyspace: any """ _validation = { @@ -8633,6 +12071,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name of the Cassandra database. Type: string (or Expression with + resultType string). + :paramtype table_name: any + :keyword keyspace: The keyspace of the Cassandra database. Type: string (or Expression with + resultType string). + :paramtype keyspace: any + """ super(CassandraTableDataset, self).__init__(**kwargs) self.type = 'CassandraTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -8646,26 +12112,26 @@ class ChainingTrigger(Trigger): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Trigger type.Constant filled by server. + :vartype type: str + :ivar description: Trigger description. + :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[any] - :param pipeline: Required. 
Pipeline for which runs are created when all upstream pipelines + :ivar annotations: List of tags that can be used for describing the trigger. + :vartype annotations: list[any] + :ivar pipeline: Required. Pipeline for which runs are created when all upstream pipelines complete successfully. - :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference - :param depends_on: Required. Upstream Pipelines. - :type depends_on: list[~azure.mgmt.datafactory.models.PipelineReference] - :param run_dimension: Required. Run Dimension property that needs to be emitted by upstream + :vartype pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference + :ivar depends_on: Required. Upstream Pipelines. + :vartype depends_on: list[~azure.mgmt.datafactory.models.PipelineReference] + :ivar run_dimension: Required. Run Dimension property that needs to be emitted by upstream pipelines. - :type run_dimension: str + :vartype run_dimension: str """ _validation = { @@ -8691,6 +12157,23 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Trigger description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the trigger. + :paramtype annotations: list[any] + :keyword pipeline: Required. Pipeline for which runs are created when all upstream pipelines + complete successfully. + :paramtype pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference + :keyword depends_on: Required. Upstream Pipelines. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.PipelineReference] + :keyword run_dimension: Required. Run Dimension property that needs to be emitted by upstream + pipelines. 
+ :paramtype run_dimension: str + """ super(ChainingTrigger, self).__init__(**kwargs) self.type = 'ChainingTrigger' # type: str self.pipeline = kwargs['pipeline'] @@ -8703,14 +12186,14 @@ class CloudError(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param code: Required. Error code. - :type code: str - :param message: Required. Error message. - :type message: str - :param target: Property name/path in request associated with error. - :type target: str - :param details: Array with additional error details. - :type details: list[~azure.mgmt.datafactory.models.CloudError] + :ivar code: Required. Error code. + :vartype code: str + :ivar message: Required. Error message. + :vartype message: str + :ivar target: Property name/path in request associated with error. + :vartype target: str + :ivar details: Array with additional error details. + :vartype details: list[~azure.mgmt.datafactory.models.CloudError] """ _validation = { @@ -8729,6 +12212,16 @@ def __init__( self, **kwargs ): + """ + :keyword code: Required. Error code. + :paramtype code: str + :keyword message: Required. Error message. + :paramtype message: str + :keyword target: Property name/path in request associated with error. + :paramtype target: str + :keyword details: Array with additional error details. + :paramtype details: list[~azure.mgmt.datafactory.models.CloudError] + """ super(CloudError, self).__init__(**kwargs) self.code = kwargs['code'] self.message = kwargs['message'] @@ -8741,14 +12234,14 @@ class CmdkeySetup(CustomSetupBase): All required parameters must be populated in order to send to Azure. - :param type: Required. The type of custom setup.Constant filled by server. - :type type: str - :param target_name: Required. The server name of data source access. - :type target_name: any - :param user_name: Required. The user name of data source access. - :type user_name: any - :param password: Required. The password of data source access. 
- :type password: ~azure.mgmt.datafactory.models.SecretBase + :ivar type: Required. The type of custom setup.Constant filled by server. + :vartype type: str + :ivar target_name: Required. The server name of data source access. + :vartype target_name: any + :ivar user_name: Required. The user name of data source access. + :vartype user_name: any + :ivar password: Required. The password of data source access. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -8769,6 +12262,14 @@ def __init__( self, **kwargs ): + """ + :keyword target_name: Required. The server name of data source access. + :paramtype target_name: any + :keyword user_name: Required. The user name of data source access. + :paramtype user_name: any + :keyword password: Required. The password of data source access. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + """ super(CmdkeySetup, self).__init__(**kwargs) self.type = 'CmdkeySetup' # type: str self.target_name = kwargs['target_name'] @@ -8779,9 +12280,9 @@ def __init__( class CMKIdentityDefinition(msrest.serialization.Model): """Managed Identity used for CMK. - :param user_assigned_identity: The resource id of the user assigned identity to authenticate to + :ivar user_assigned_identity: The resource id of the user assigned identity to authenticate to customer's key vault. - :type user_assigned_identity: str + :vartype user_assigned_identity: str """ _attribute_map = { @@ -8792,6 +12293,11 @@ def __init__( self, **kwargs ): + """ + :keyword user_assigned_identity: The resource id of the user assigned identity to authenticate + to customer's key vault. + :paramtype user_assigned_identity: str + """ super(CMKIdentityDefinition, self).__init__(**kwargs) self.user_assigned_identity = kwargs.get('user_assigned_identity', None) @@ -8801,31 +12307,31 @@ class CommonDataServiceForAppsEntityDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param entity_name: The logical name of the entity. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar entity_name: The logical name of the entity. Type: string (or Expression with resultType string). - :type entity_name: any + :vartype entity_name: any """ _validation = { @@ -8850,6 +12356,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword entity_name: The logical name of the entity. Type: string (or Expression with + resultType string). + :paramtype entity_name: any + """ super(CommonDataServiceForAppsEntityDataset, self).__init__(**kwargs) self.type = 'CommonDataServiceForAppsEntity' # type: str self.entity_name = kwargs.get('entity_name', None) @@ -8860,68 +12391,68 @@ class CommonDataServiceForAppsLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param deployment_type: Required. The deployment type of the Common Data Service for Apps + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar deployment_type: Required. The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType string). - :type deployment_type: any - :param host_name: The host name of the on-premises Common Data Service for Apps server. The + :vartype deployment_type: any + :ivar host_name: The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :type host_name: any - :param port: The port of on-premises Common Data Service for Apps server. The property is + :vartype host_name: any + :ivar port: The port of on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: any - :param service_uri: The URL to the Microsoft Common Data Service for Apps server. The property + :vartype port: any + :ivar service_uri: The URL to the Microsoft Common Data Service for Apps server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: any - :param organization_name: The organization name of the Common Data Service for Apps instance. + :vartype service_uri: any + :ivar organization_name: The organization name of the Common Data Service for Apps instance. The property is required for on-prem and required for online when there are more than one Common Data Service for Apps instances associated with the user. 
Type: string (or Expression with resultType string). - :type organization_name: any - :param authentication_type: Required. The authentication type to connect to Common Data Service + :vartype organization_name: any + :ivar authentication_type: Required. The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). - :type authentication_type: any - :param username: User name to access the Common Data Service for Apps instance. Type: string - (or Expression with resultType string). - :type username: any - :param password: Password to access the Common Data Service for Apps instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_credential_type: The service principal credential type to use in + :vartype authentication_type: any + :ivar username: User name to access the Common Data Service for Apps instance. Type: string (or + Expression with resultType string). + :vartype username: any + :ivar password: Password to access the Common Data Service for Apps instance. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_principal_id: The client ID of the application in Azure Active Directory used for + Server-To-Server authentication. Type: string (or Expression with resultType string). + :vartype service_principal_id: any + :ivar service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). 
- :type service_principal_credential_type: any - :param service_principal_credential: The credential of the service principal object in Azure + :vartype service_principal_credential_type: any + :ivar service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -8955,6 +12486,68 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword deployment_type: Required. The deployment type of the Common Data Service for Apps + instance. 
'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common + Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType + string). + :paramtype deployment_type: any + :keyword host_name: The host name of the on-premises Common Data Service for Apps server. The + property is required for on-prem and not allowed for online. Type: string (or Expression with + resultType string). + :paramtype host_name: any + :keyword port: The port of on-premises Common Data Service for Apps server. The property is + required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression + with resultType integer), minimum: 0. + :paramtype port: any + :keyword service_uri: The URL to the Microsoft Common Data Service for Apps server. The + property is required for on-line and not allowed for on-prem. Type: string (or Expression with + resultType string). + :paramtype service_uri: any + :keyword organization_name: The organization name of the Common Data Service for Apps instance. + The property is required for on-prem and required for online when there are more than one + Common Data Service for Apps instances associated with the user. Type: string (or Expression + with resultType string). + :paramtype organization_name: any + :keyword authentication_type: Required. The authentication type to connect to Common Data + Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd + scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: + string (or Expression with resultType string). + :paramtype authentication_type: any + :keyword username: User name to access the Common Data Service for Apps instance. Type: string + (or Expression with resultType string). + :paramtype username: any + :keyword password: Password to access the Common Data Service for Apps instance. 
+ :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). + :paramtype service_principal_credential_type: any + :keyword service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :paramtype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(CommonDataServiceForAppsLinkedService, self).__init__(**kwargs) self.type = 'CommonDataServiceForApps' # type: str self.deployment_type = kwargs['deployment_type'] @@ -8976,39 +12569,39 @@ class CommonDataServiceForAppsSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. 
- :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: Required. 
The write behavior for the operation. Possible values include: + :vartype disable_metrics_collection: any + :ivar write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". - :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior - :param ignore_null_values: The flag indicating whether to ignore null values from input dataset + :vartype write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior + :ivar ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: any - :param alternate_key_name: The logical name of the alternate key which will be used when + :vartype ignore_null_values: any + :ivar alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). - :type alternate_key_name: any + :vartype alternate_key_name: any """ _validation = { @@ -9034,6 +12627,39 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: Required. The write behavior for the operation. Possible values + include: "Upsert". + :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior + :keyword ignore_null_values: The flag indicating whether to ignore null values from input + dataset (except key fields) during write operation. Default is false. Type: boolean (or + Expression with resultType boolean). + :paramtype ignore_null_values: any + :keyword alternate_key_name: The logical name of the alternate key which will be used when + upserting records. Type: string (or Expression with resultType string). + :paramtype alternate_key_name: any + """ super(CommonDataServiceForAppsSink, self).__init__(**kwargs) self.type = 'CommonDataServiceForAppsSink' # type: str self.write_behavior = kwargs['write_behavior'] @@ -9046,29 +12672,29 @@ class CommonDataServiceForAppsSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. 
+ :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data + :vartype disable_metrics_collection: any + :ivar query: FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). - :type query: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -9090,6 +12716,29 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: FetchXML is a proprietary query language that is used in Microsoft Common Data + Service for Apps (online & on-premises). Type: string (or Expression with resultType string). + :paramtype query: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(CommonDataServiceForAppsSource, self).__init__(**kwargs) self.type = 'CommonDataServiceForAppsSource' # type: str self.query = kwargs.get('query', None) @@ -9101,12 +12750,12 @@ class ComponentSetup(CustomSetupBase): All required parameters must be populated in order to send to Azure. - :param type: Required. The type of custom setup.Constant filled by server. - :type type: str - :param component_name: Required. The name of the 3rd party component. 
- :type component_name: str - :param license_key: The license key to activate the component. - :type license_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar type: Required. The type of custom setup.Constant filled by server. + :vartype type: str + :ivar component_name: Required. The name of the 3rd party component. + :vartype component_name: str + :ivar license_key: The license key to activate the component. + :vartype license_key: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -9124,6 +12773,12 @@ def __init__( self, **kwargs ): + """ + :keyword component_name: Required. The name of the 3rd party component. + :paramtype component_name: str + :keyword license_key: The license key to activate the component. + :paramtype license_key: ~azure.mgmt.datafactory.models.SecretBase + """ super(ComponentSetup, self).__init__(**kwargs) self.type = 'ComponentSetup' # type: str self.component_name = kwargs['component_name'] @@ -9138,11 +12793,11 @@ class CompressionReadSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The Compression setting type.Constant filled by server. - :type type: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. The Compression setting type.Constant filled by server. + :vartype type: str """ _validation = { @@ -9162,6 +12817,11 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + """ super(CompressionReadSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = 'CompressionReadSettings' # type: str @@ -9172,43 +12832,43 @@ class ConcurLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_properties: Properties used to connect to Concur. It is mutually exclusive + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_properties: Properties used to connect to Concur. 
It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: any - :param client_id: Required. Application client_id supplied by Concur App Management. - :type client_id: any - :param username: Required. The user name that you use to access Concur Service. - :type username: any - :param password: The password corresponding to the user name that you provided in the username + :vartype connection_properties: any + :ivar client_id: Required. Application client_id supplied by Concur App Management. + :vartype client_id: any + :ivar username: Required. The user name that you use to access Concur Service. + :vartype username: any + :ivar password: The password corresponding to the user name that you provided in the username field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -9238,6 +12898,43 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_properties: Properties used to connect to Concur. It is mutually exclusive + with any other properties in the linked service. Type: object. + :paramtype connection_properties: any + :keyword client_id: Required. Application client_id supplied by Concur App Management. + :paramtype client_id: any + :keyword username: Required. The user name that you use to access Concur Service. + :paramtype username: any + :keyword password: The password corresponding to the user name that you provided in the + username field. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. 
+ :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(ConcurLinkedService, self).__init__(**kwargs) self.type = 'Concur' # type: str self.connection_properties = kwargs.get('connection_properties', None) @@ -9255,30 +12952,30 @@ class ConcurObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -9303,6 +13000,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. 
Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(ConcurObjectDataset, self).__init__(**kwargs) self.type = 'ConcurObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -9313,32 +13034,32 @@ class ConcurSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). 
- :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: any + :vartype query: any """ _validation = { @@ -9361,6 +13082,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). 
+ :paramtype query: any + """ super(ConcurSource, self).__init__(**kwargs) self.type = 'ConcurSource' # type: str self.query = kwargs.get('query', None) @@ -9395,6 +13142,8 @@ def __init__( self, **kwargs ): + """ + """ super(ConnectionStateProperties, self).__init__(**kwargs) self.actions_required = None self.description = None @@ -9406,66 +13155,66 @@ class CopyActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param inputs: List of inputs for the activity. - :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] - :param outputs: List of outputs for the activity. - :type outputs: list[~azure.mgmt.datafactory.models.DatasetReference] - :param source: Required. Copy activity source. - :type source: ~azure.mgmt.datafactory.models.CopySource - :param sink: Required. Copy activity sink. - :type sink: ~azure.mgmt.datafactory.models.CopySink - :param translator: Copy activity translator. If not specified, tabular translator is used. - :type translator: any - :param enable_staging: Specifies whether to copy data via an interim staging. 
Default value is + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar inputs: List of inputs for the activity. + :vartype inputs: list[~azure.mgmt.datafactory.models.DatasetReference] + :ivar outputs: List of outputs for the activity. + :vartype outputs: list[~azure.mgmt.datafactory.models.DatasetReference] + :ivar source: Required. Copy activity source. + :vartype source: ~azure.mgmt.datafactory.models.CopySource + :ivar sink: Required. Copy activity sink. + :vartype sink: ~azure.mgmt.datafactory.models.CopySink + :ivar translator: Copy activity translator. If not specified, tabular translator is used. + :vartype translator: any + :ivar enable_staging: Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_staging: any - :param staging_settings: Specifies interim staging settings when EnableStaging is true. 
- :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings - :param parallel_copies: Maximum number of concurrent sessions opened on the source or sink to + :vartype enable_staging: any + :ivar staging_settings: Specifies interim staging settings when EnableStaging is true. + :vartype staging_settings: ~azure.mgmt.datafactory.models.StagingSettings + :ivar parallel_copies: Maximum number of concurrent sessions opened on the source or sink to avoid overloading the data store. Type: integer (or Expression with resultType integer), minimum: 0. - :type parallel_copies: any - :param data_integration_units: Maximum number of data integration units that can be used to + :vartype parallel_copies: any + :ivar data_integration_units: Maximum number of data integration units that can be used to perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. - :type data_integration_units: any - :param enable_skip_incompatible_row: Whether to skip incompatible row. Default value is false. + :vartype data_integration_units: any + :ivar enable_skip_incompatible_row: Whether to skip incompatible row. Default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_skip_incompatible_row: any - :param redirect_incompatible_row_settings: Redirect incompatible row settings when + :vartype enable_skip_incompatible_row: any + :ivar redirect_incompatible_row_settings: Redirect incompatible row settings when EnableSkipIncompatibleRow is true. - :type redirect_incompatible_row_settings: + :vartype redirect_incompatible_row_settings: ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings - :param log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings customer + :ivar log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings customer need to provide when enabling session log. 
- :type log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings - :param log_settings: Log settings customer needs provide when enabling log. - :type log_settings: ~azure.mgmt.datafactory.models.LogSettings - :param preserve_rules: Preserve Rules. - :type preserve_rules: list[any] - :param preserve: Preserve rules. - :type preserve: list[any] - :param validate_data_consistency: Whether to enable Data Consistency validation. Type: boolean + :vartype log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings + :ivar log_settings: Log settings customer needs provide when enabling log. + :vartype log_settings: ~azure.mgmt.datafactory.models.LogSettings + :ivar preserve_rules: Preserve Rules. + :vartype preserve_rules: list[any] + :ivar preserve: Preserve rules. + :vartype preserve: list[any] + :ivar validate_data_consistency: Whether to enable Data Consistency validation. Type: boolean (or Expression with resultType boolean). - :type validate_data_consistency: any - :param skip_error_file: Specify the fault tolerance for data consistency. - :type skip_error_file: ~azure.mgmt.datafactory.models.SkipErrorFile + :vartype validate_data_consistency: any + :ivar skip_error_file: Specify the fault tolerance for data consistency. + :vartype skip_error_file: ~azure.mgmt.datafactory.models.SkipErrorFile """ _validation = { @@ -9507,6 +13256,66 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. 
+ :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword inputs: List of inputs for the activity. + :paramtype inputs: list[~azure.mgmt.datafactory.models.DatasetReference] + :keyword outputs: List of outputs for the activity. + :paramtype outputs: list[~azure.mgmt.datafactory.models.DatasetReference] + :keyword source: Required. Copy activity source. + :paramtype source: ~azure.mgmt.datafactory.models.CopySource + :keyword sink: Required. Copy activity sink. + :paramtype sink: ~azure.mgmt.datafactory.models.CopySink + :keyword translator: Copy activity translator. If not specified, tabular translator is used. + :paramtype translator: any + :keyword enable_staging: Specifies whether to copy data via an interim staging. Default value + is false. Type: boolean (or Expression with resultType boolean). + :paramtype enable_staging: any + :keyword staging_settings: Specifies interim staging settings when EnableStaging is true. + :paramtype staging_settings: ~azure.mgmt.datafactory.models.StagingSettings + :keyword parallel_copies: Maximum number of concurrent sessions opened on the source or sink to + avoid overloading the data store. Type: integer (or Expression with resultType integer), + minimum: 0. + :paramtype parallel_copies: any + :keyword data_integration_units: Maximum number of data integration units that can be used to + perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. + :paramtype data_integration_units: any + :keyword enable_skip_incompatible_row: Whether to skip incompatible row. Default value is + false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype enable_skip_incompatible_row: any + :keyword redirect_incompatible_row_settings: Redirect incompatible row settings when + EnableSkipIncompatibleRow is true. + :paramtype redirect_incompatible_row_settings: + ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :keyword log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings + customer need to provide when enabling session log. + :paramtype log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings + :keyword log_settings: Log settings customer needs provide when enabling log. + :paramtype log_settings: ~azure.mgmt.datafactory.models.LogSettings + :keyword preserve_rules: Preserve Rules. + :paramtype preserve_rules: list[any] + :keyword preserve: Preserve rules. + :paramtype preserve: list[any] + :keyword validate_data_consistency: Whether to enable Data Consistency validation. Type: + boolean (or Expression with resultType boolean). + :paramtype validate_data_consistency: any + :keyword skip_error_file: Specify the fault tolerance for data consistency. + :paramtype skip_error_file: ~azure.mgmt.datafactory.models.SkipErrorFile + """ super(CopyActivity, self).__init__(**kwargs) self.type = 'Copy' # type: str self.inputs = kwargs.get('inputs', None) @@ -9531,12 +13340,12 @@ def __init__( class CopyActivityLogSettings(msrest.serialization.Model): """Settings for copy activity log. - :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + :ivar log_level: Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). - :type log_level: any - :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or + :vartype log_level: any + :ivar enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). 
- :type enable_reliable_logging: any + :vartype enable_reliable_logging: any """ _attribute_map = { @@ -9548,6 +13357,14 @@ def __init__( self, **kwargs ): + """ + :keyword log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + Expression with resultType string). + :paramtype log_level: any + :keyword enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean + (or Expression with resultType boolean). + :paramtype enable_reliable_logging: any + """ super(CopyActivityLogSettings, self).__init__(**kwargs) self.log_level = kwargs.get('log_level', None) self.enable_reliable_logging = kwargs.get('enable_reliable_logging', None) @@ -9561,11 +13378,11 @@ class CopyTranslator(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy translator type.Constant filled by server. - :type type: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy translator type.Constant filled by server. + :vartype type: str """ _validation = { @@ -9585,6 +13402,11 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(CopyTranslator, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = 'CopyTranslator' # type: str @@ -9595,59 +13417,59 @@ class CosmosDbLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: The connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or - Expression with resultType string). - :type account_endpoint: any - :param database: The name of the database. Type: string (or Expression with resultType string). - :type database: any - :param account_key: The account key of the Azure CosmosDB account. 
Type: SecureString or + :vartype connection_string: any + :ivar account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or Expression + with resultType string). + :vartype account_endpoint: any + :ivar database: The name of the database. Type: string (or Expression with resultType string). + :vartype database: any + :ivar account_key: The account key of the Azure CosmosDB account. Type: SecureString or AzureKeyVaultSecretReference. - :type account_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_credential_type: The service principal credential type to use in + :vartype account_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_principal_id: The client ID of the application in Azure Active Directory used for + Server-To-Server authentication. Type: string (or Expression with resultType string). + :vartype service_principal_id: any + :ivar service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or + :vartype service_principal_credential_type: str or ~azure.mgmt.datafactory.models.CosmosDbServicePrincipalCredentialType - :param service_principal_credential: The credential of the service principal object in Azure + :ivar service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. 
If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + :vartype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: any - :param connection_mode: The connection mode used to access CosmosDB account. Type: string (or + :vartype azure_cloud_type: any + :ivar connection_mode: The connection mode used to access CosmosDB account. Type: string (or Expression with resultType string). Possible values include: "Gateway", "Direct". - :type connection_mode: str or ~azure.mgmt.datafactory.models.CosmosDbConnectionMode - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_mode: str or ~azure.mgmt.datafactory.models.CosmosDbConnectionMode + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -9678,6 +13500,60 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or + Expression with resultType string). + :paramtype account_endpoint: any + :keyword database: The name of the database. Type: string (or Expression with resultType + string). + :paramtype database: any + :keyword account_key: The account key of the Azure CosmosDB account. Type: SecureString or + AzureKeyVaultSecretReference. + :paramtype account_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). 
Possible values include: + "ServicePrincipalKey", "ServicePrincipalCert". + :paramtype service_principal_credential_type: str or + ~azure.mgmt.datafactory.models.CosmosDbServicePrincipalCredentialType + :keyword service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :paramtype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. + Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is + the data factory regions’ cloud type. Type: string (or Expression with resultType string). + :paramtype azure_cloud_type: any + :keyword connection_mode: The connection mode used to access CosmosDB account. Type: string (or + Expression with resultType string). Possible values include: "Gateway", "Direct". + :paramtype connection_mode: str or ~azure.mgmt.datafactory.models.CosmosDbConnectionMode + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(CosmosDbLinkedService, self).__init__(**kwargs) self.type = 'CosmosDb' # type: str self.connection_string = kwargs.get('connection_string', None) @@ -9698,31 +13574,31 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type: + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type: string (or Expression with resultType string). - :type collection: any + :vartype collection: any """ _validation = { @@ -9748,6 +13624,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword collection: Required. The collection name of the CosmosDB (MongoDB API) database. + Type: string (or Expression with resultType string). + :paramtype collection: any + """ super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs) self.type = 'CosmosDbMongoDbApiCollection' # type: str self.collection = kwargs['collection'] @@ -9758,29 +13659,29 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param is_server_version_above32: Whether the CosmosDB (MongoDB API) server version is higher + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar is_server_version_above32: Whether the CosmosDB (MongoDB API) server version is higher than 3.2. The default value is false. Type: boolean (or Expression with resultType boolean). - :type is_server_version_above32: any - :param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string, + :vartype is_server_version_above32: any + :ivar connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param database: Required. The name of the CosmosDB (MongoDB API) database that you want to + :vartype connection_string: any + :ivar database: Required. The name of the CosmosDB (MongoDB API) database that you want to access. Type: string (or Expression with resultType string). - :type database: any + :vartype database: any """ _validation = { @@ -9805,6 +13706,29 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword is_server_version_above32: Whether the CosmosDB (MongoDB API) server version is higher + than 3.2. 
The default value is false. Type: boolean (or Expression with resultType boolean). + :paramtype is_server_version_above32: any + :keyword connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: + string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword database: Required. The name of the CosmosDB (MongoDB API) database that you want to + access. Type: string (or Expression with resultType string). + :paramtype database: any + """ super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs) self.type = 'CosmosDbMongoDbApi' # type: str self.is_server_version_above32 = kwargs.get('is_server_version_above32', None) @@ -9817,33 +13741,33 @@ class CosmosDbMongoDbApiSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: Specifies whether the document with same key to be overwritten (upsert) + :vartype disable_metrics_collection: any + :ivar write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). - :type write_behavior: any + :vartype write_behavior: any """ _validation = { @@ -9866,6 +13790,33 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. 
Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: Specifies whether the document with same key to be overwritten + (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or + Expression with resultType string). Type: string (or Expression with resultType string). + :paramtype write_behavior: any + """ super(CosmosDbMongoDbApiSink, self).__init__(**kwargs) self.type = 'CosmosDbMongoDbApiSink' # type: str self.write_behavior = kwargs.get('write_behavior', None) @@ -9876,40 +13827,40 @@ class CosmosDbMongoDbApiSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. 
- :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param filter: Specifies selection filter using query operators. To return all documents in a + :vartype disable_metrics_collection: any + :ivar filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). - :type filter: any - :param cursor_methods: Cursor methods for Mongodb query. 
- :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties - :param batch_size: Specifies the number of documents to return in each batch of the response + :vartype filter: any + :ivar cursor_methods: Cursor methods for Mongodb query. + :vartype cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :ivar batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). - :type batch_size: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype batch_size: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -9934,6 +13885,40 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). + :paramtype filter: any + :keyword cursor_methods: Cursor methods for Mongodb query. + :paramtype cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :keyword batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB instance. In most cases, modifying the batch size will not affect the user or the + application. This property's main purpose is to avoid hit the limitation of response size. + Type: integer (or Expression with resultType integer). + :paramtype batch_size: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(CosmosDbMongoDbApiSource, self).__init__(**kwargs) self.type = 'CosmosDbMongoDbApiSource' # type: str self.filter = kwargs.get('filter', None) @@ -9948,31 +13933,31 @@ class CosmosDbSqlApiCollectionDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or Expression with resultType string). - :type collection_name: any + :vartype collection_name: any """ _validation = { @@ -9998,6 +13983,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or + Expression with resultType string). + :paramtype collection_name: any + """ super(CosmosDbSqlApiCollectionDataset, self).__init__(**kwargs) self.type = 'CosmosDbSqlApiCollection' # type: str self.collection_name = kwargs['collection_name'] @@ -10008,32 +14018,32 @@ class CosmosDbSqlApiSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or + :vartype disable_metrics_collection: any + :ivar write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. - :type write_behavior: any + :vartype write_behavior: any """ _validation = { @@ -10056,6 +14066,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or + Expression with resultType string). Allowed values: insert and upsert. + :paramtype write_behavior: any + """ super(CosmosDbSqlApiSink, self).__init__(**kwargs) self.type = 'CosmosDbSqlApiSink' # type: str self.write_behavior = kwargs.get('write_behavior', None) @@ -10066,37 +14102,37 @@ class CosmosDbSqlApiSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: SQL API query. Type: string (or Expression with resultType string). - :type query: any - :param page_size: Page size of the result. Type: integer (or Expression with resultType + :vartype disable_metrics_collection: any + :ivar query: SQL API query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar page_size: Page size of the result. Type: integer (or Expression with resultType integer). - :type page_size: any - :param preferred_regions: Preferred regions. Type: array of strings (or Expression with + :vartype page_size: any + :ivar preferred_regions: Preferred regions. Type: array of strings (or Expression with resultType array of strings). - :type preferred_regions: any - :param detect_datetime: Whether detect primitive values as datetime values. Type: boolean (or + :vartype preferred_regions: any + :ivar detect_datetime: Whether detect primitive values as datetime values. Type: boolean (or Expression with resultType boolean). - :type detect_datetime: any - :param additional_columns: Specifies the additional columns to be added to source data. 
Type: + :vartype detect_datetime: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -10121,6 +14157,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: SQL API query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword page_size: Page size of the result. Type: integer (or Expression with resultType + integer). + :paramtype page_size: any + :keyword preferred_regions: Preferred regions. Type: array of strings (or Expression with + resultType array of strings). + :paramtype preferred_regions: any + :keyword detect_datetime: Whether detect primitive values as datetime values. Type: boolean (or + Expression with resultType boolean). + :paramtype detect_datetime: any + :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(CosmosDbSqlApiSource, self).__init__(**kwargs) self.type = 'CosmosDbSqlApiSource' # type: str self.query = kwargs.get('query', None) @@ -10135,28 +14202,28 @@ class CouchbaseLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: An ODBC connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: An ODBC connection string. 
Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param cred_string: The Azure key vault secret reference of credString in connection string. - :type cred_string: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar cred_string: The Azure key vault secret reference of credString in connection string. + :vartype cred_string: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -10179,6 +14246,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword cred_string: The Azure key vault secret reference of credString in connection string. 
+ :paramtype cred_string: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(CouchbaseLinkedService, self).__init__(**kwargs) self.type = 'Couchbase' # type: str self.connection_string = kwargs.get('connection_string', None) @@ -10191,32 +14280,32 @@ class CouchbaseSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -10239,6 +14328,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(CouchbaseSource, self).__init__(**kwargs) self.type = 'CouchbaseSource' # type: str self.query = kwargs.get('query', None) @@ -10249,30 +14364,30 @@ class CouchbaseTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. 
+ :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). 
+ :vartype table_name: any """ _validation = { @@ -10297,6 +14412,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(CouchbaseTableDataset, self).__init__(**kwargs) self.type = 'CouchbaseTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -10305,16 +14444,16 @@ def __init__( class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): """Request body structure for creating data flow debug session. - :param compute_type: Compute type of the cluster. The value will be overwritten by the same + :ivar compute_type: Compute type of the cluster. The value will be overwritten by the same setting in integration runtime if provided. 
- :type compute_type: str - :param core_count: Core count of the cluster. The value will be overwritten by the same setting + :vartype compute_type: str + :ivar core_count: Core count of the cluster. The value will be overwritten by the same setting in integration runtime if provided. - :type core_count: int - :param time_to_live: Time to live setting of the cluster in minutes. - :type time_to_live: int - :param integration_runtime: Set to use integration runtime setting for data flow debug session. - :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeDebugResource + :vartype core_count: int + :ivar time_to_live: Time to live setting of the cluster in minutes. + :vartype time_to_live: int + :ivar integration_runtime: Set to use integration runtime setting for data flow debug session. + :vartype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeDebugResource """ _attribute_map = { @@ -10328,6 +14467,19 @@ def __init__( self, **kwargs ): + """ + :keyword compute_type: Compute type of the cluster. The value will be overwritten by the same + setting in integration runtime if provided. + :paramtype compute_type: str + :keyword core_count: Core count of the cluster. The value will be overwritten by the same + setting in integration runtime if provided. + :paramtype core_count: int + :keyword time_to_live: Time to live setting of the cluster in minutes. + :paramtype time_to_live: int + :keyword integration_runtime: Set to use integration runtime setting for data flow debug + session. + :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeDebugResource + """ super(CreateDataFlowDebugSessionRequest, self).__init__(**kwargs) self.compute_type = kwargs.get('compute_type', None) self.core_count = kwargs.get('core_count', None) @@ -10338,10 +14490,10 @@ def __init__( class CreateDataFlowDebugSessionResponse(msrest.serialization.Model): """Response body structure for creating data flow debug session. 
- :param status: The state of the debug session. - :type status: str - :param session_id: The ID of data flow debug session. - :type session_id: str + :ivar status: The state of the debug session. + :vartype status: str + :ivar session_id: The ID of data flow debug session. + :vartype session_id: str """ _attribute_map = { @@ -10353,6 +14505,12 @@ def __init__( self, **kwargs ): + """ + :keyword status: The state of the debug session. + :paramtype status: str + :keyword session_id: The ID of data flow debug session. + :paramtype session_id: str + """ super(CreateDataFlowDebugSessionResponse, self).__init__(**kwargs) self.status = kwargs.get('status', None) self.session_id = kwargs.get('session_id', None) @@ -10361,17 +14519,17 @@ def __init__( class CreateLinkedIntegrationRuntimeRequest(msrest.serialization.Model): """The linked integration runtime information. - :param name: The name of the linked integration runtime. - :type name: str - :param subscription_id: The ID of the subscription that the linked integration runtime belongs + :ivar name: The name of the linked integration runtime. + :vartype name: str + :ivar subscription_id: The ID of the subscription that the linked integration runtime belongs to. - :type subscription_id: str - :param data_factory_name: The name of the data factory that the linked integration runtime + :vartype subscription_id: str + :ivar data_factory_name: The name of the data factory that the linked integration runtime belongs to. - :type data_factory_name: str - :param data_factory_location: The location of the data factory that the linked integration + :vartype data_factory_name: str + :ivar data_factory_location: The location of the data factory that the linked integration runtime belongs to. - :type data_factory_location: str + :vartype data_factory_location: str """ _attribute_map = { @@ -10385,6 +14543,19 @@ def __init__( self, **kwargs ): + """ + :keyword name: The name of the linked integration runtime. 
+ :paramtype name: str + :keyword subscription_id: The ID of the subscription that the linked integration runtime + belongs to. + :paramtype subscription_id: str + :keyword data_factory_name: The name of the data factory that the linked integration runtime + belongs to. + :paramtype data_factory_name: str + :keyword data_factory_location: The location of the data factory that the linked integration + runtime belongs to. + :paramtype data_factory_location: str + """ super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.subscription_id = kwargs.get('subscription_id', None) @@ -10397,8 +14568,8 @@ class CreateRunResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param run_id: Required. Identifier of a run. - :type run_id: str + :ivar run_id: Required. Identifier of a run. + :vartype run_id: str """ _validation = { @@ -10413,6 +14584,10 @@ def __init__( self, **kwargs ): + """ + :keyword run_id: Required. Identifier of a run. + :paramtype run_id: str + """ super(CreateRunResponse, self).__init__(**kwargs) self.run_id = kwargs['run_id'] @@ -10425,15 +14600,15 @@ class Credential(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of credential.Constant filled by server. - :type type: str - :param description: Credential description. - :type description: str - :param annotations: List of tags that can be used for describing the Credential. - :type annotations: list[any] + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of credential.Constant filled by server. 
+ :vartype type: str + :ivar description: Credential description. + :vartype description: str + :ivar annotations: List of tags that can be used for describing the Credential. + :vartype annotations: list[any] """ _validation = { @@ -10455,6 +14630,15 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Credential description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the Credential. + :paramtype annotations: list[any] + """ super(Credential, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = 'Credential' # type: str @@ -10469,13 +14653,13 @@ class CredentialReference(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar type: Credential reference type. Has constant value: "CredentialReference". :vartype type: str - :param reference_name: Required. Reference credential name. - :type reference_name: str + :ivar reference_name: Required. Reference credential name. + :vartype reference_name: str """ _validation = { @@ -10495,6 +14679,13 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword reference_name: Required. Reference credential name. 
+ :paramtype reference_name: str + """ super(CredentialReference, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.reference_name = kwargs['reference_name'] @@ -10533,6 +14724,8 @@ def __init__( self, **kwargs ): + """ + """ super(SubResource, self).__init__(**kwargs) self.id = None self.name = None @@ -10555,8 +14748,8 @@ class CredentialResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Properties of credentials. - :type properties: ~azure.mgmt.datafactory.models.Credential + :ivar properties: Required. Properties of credentials. + :vartype properties: ~azure.mgmt.datafactory.models.Credential """ _validation = { @@ -10579,6 +14772,10 @@ def __init__( self, **kwargs ): + """ + :keyword properties: Required. Properties of credentials. + :paramtype properties: ~azure.mgmt.datafactory.models.Credential + """ super(CredentialResource, self).__init__(**kwargs) self.properties = kwargs['properties'] @@ -10588,43 +14785,43 @@ class CustomActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. 
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param command: Required. Command for custom activity Type: string (or Expression with + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar command: Required. Command for custom activity Type: string (or Expression with resultType string). - :type command: any - :param resource_linked_service: Resource linked service reference. - :type resource_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param folder_path: Folder path for resource files Type: string (or Expression with resultType + :vartype command: any + :ivar resource_linked_service: Resource linked service reference. + :vartype resource_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar folder_path: Folder path for resource files Type: string (or Expression with resultType string). - :type folder_path: any - :param reference_objects: Reference objects. - :type reference_objects: ~azure.mgmt.datafactory.models.CustomActivityReferenceObject - :param extended_properties: User defined property bag. 
There is no restriction on the keys or + :vartype folder_path: any + :ivar reference_objects: Reference objects. + :vartype reference_objects: ~azure.mgmt.datafactory.models.CustomActivityReferenceObject + :ivar extended_properties: User defined property bag. There is no restriction on the keys or values that can be used. The user specified custom activity has the full responsibility to consume and interpret the content defined. - :type extended_properties: dict[str, any] - :param retention_time_in_days: The retention time for the files submitted for custom activity. + :vartype extended_properties: dict[str, any] + :ivar retention_time_in_days: The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). - :type retention_time_in_days: any - :param auto_user_specification: Elevation level and scope for the user, default is nonadmin + :vartype retention_time_in_days: any + :ivar auto_user_specification: Elevation level and scope for the user, default is nonadmin task. Type: string (or Expression with resultType double). - :type auto_user_specification: any + :vartype auto_user_specification: any """ _validation = { @@ -10655,6 +14852,43 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword command: Required. Command for custom activity Type: string (or Expression with + resultType string). + :paramtype command: any + :keyword resource_linked_service: Resource linked service reference. + :paramtype resource_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword folder_path: Folder path for resource files Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword reference_objects: Reference objects. + :paramtype reference_objects: ~azure.mgmt.datafactory.models.CustomActivityReferenceObject + :keyword extended_properties: User defined property bag. There is no restriction on the keys or + values that can be used. The user specified custom activity has the full responsibility to + consume and interpret the content defined. + :paramtype extended_properties: dict[str, any] + :keyword retention_time_in_days: The retention time for the files submitted for custom + activity. Type: double (or Expression with resultType double). + :paramtype retention_time_in_days: any + :keyword auto_user_specification: Elevation level and scope for the user, default is nonadmin + task. Type: string (or Expression with resultType double). + :paramtype auto_user_specification: any + """ super(CustomActivity, self).__init__(**kwargs) self.type = 'Custom' # type: str self.command = kwargs['command'] @@ -10669,10 +14903,10 @@ def __init__( class CustomActivityReferenceObject(msrest.serialization.Model): """Reference objects for custom activity. - :param linked_services: Linked service references. - :type linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param datasets: Dataset references. 
- :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + :ivar linked_services: Linked service references. + :vartype linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :ivar datasets: Dataset references. + :vartype datasets: list[~azure.mgmt.datafactory.models.DatasetReference] """ _attribute_map = { @@ -10684,6 +14918,12 @@ def __init__( self, **kwargs ): + """ + :keyword linked_services: Linked service references. + :paramtype linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :keyword datasets: Dataset references. + :paramtype datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + """ super(CustomActivityReferenceObject, self).__init__(**kwargs) self.linked_services = kwargs.get('linked_services', None) self.datasets = kwargs.get('datasets', None) @@ -10694,30 +14934,30 @@ class CustomDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type_properties: Custom dataset properties. - :type type_properties: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar type_properties: Custom dataset properties. + :vartype type_properties: any """ _validation = { @@ -10742,6 +14982,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. 
Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword type_properties: Custom dataset properties. + :paramtype type_properties: any + """ super(CustomDataset, self).__init__(**kwargs) self.type = 'CustomDataset' # type: str self.type_properties = kwargs.get('type_properties', None) @@ -10752,21 +15016,21 @@ class CustomDataSourceLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param type_properties: Required. Custom linked service properties. - :type type_properties: any + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar type_properties: Required. Custom linked service properties. + :vartype type_properties: any """ _validation = { @@ -10788,6 +15052,21 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword type_properties: Required. Custom linked service properties. 
+ :paramtype type_properties: any + """ super(CustomDataSourceLinkedService, self).__init__(**kwargs) self.type = 'CustomDataSource' # type: str self.type_properties = kwargs['type_properties'] @@ -10800,30 +15079,30 @@ class CustomEventsTrigger(MultiplePipelineTrigger): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Trigger type.Constant filled by server. + :vartype type: str + :ivar description: Trigger description. + :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[any] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param subject_begins_with: The event subject must begin with the pattern provided for trigger + :ivar annotations: List of tags that can be used for describing the trigger. + :vartype annotations: list[any] + :ivar pipelines: Pipelines that need to be started. + :vartype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :ivar subject_begins_with: The event subject must begin with the pattern provided for trigger to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. 
- :type subject_begins_with: str - :param subject_ends_with: The event subject must end with the pattern provided for trigger to + :vartype subject_begins_with: str + :ivar subject_ends_with: The event subject must end with the pattern provided for trigger to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. - :type subject_ends_with: str - :param events: Required. The list of event types that cause this trigger to fire. - :type events: list[any] - :param scope: Required. The ARM resource ID of the Azure Event Grid Topic. - :type scope: str + :vartype subject_ends_with: str + :ivar events: Required. The list of event types that cause this trigger to fire. + :vartype events: list[any] + :ivar scope: Required. The ARM resource ID of the Azure Event Grid Topic. + :vartype scope: str """ _validation = { @@ -10850,6 +15129,27 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Trigger description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the trigger. + :paramtype annotations: list[any] + :keyword pipelines: Pipelines that need to be started. + :paramtype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :keyword subject_begins_with: The event subject must begin with the pattern provided for + trigger to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. + :paramtype subject_begins_with: str + :keyword subject_ends_with: The event subject must end with the pattern provided for trigger to + fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. + :paramtype subject_ends_with: str + :keyword events: Required. The list of event types that cause this trigger to fire. + :paramtype events: list[any] + :keyword scope: Required. 
The ARM resource ID of the Azure Event Grid Topic. + :paramtype scope: str + """ super(CustomEventsTrigger, self).__init__(**kwargs) self.type = 'CustomEventsTrigger' # type: str self.subject_begins_with = kwargs.get('subject_begins_with', None) @@ -10863,32 +15163,32 @@ class DatabricksNotebookActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param notebook_path: Required. The absolute path of the notebook to be run in the Databricks + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. 
+ :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar notebook_path: Required. The absolute path of the notebook to be run in the Databricks Workspace. This path must begin with a slash. Type: string (or Expression with resultType string). - :type notebook_path: any - :param base_parameters: Base parameters to be used for each run of this job.If the notebook + :vartype notebook_path: any + :ivar base_parameters: Base parameters to be used for each run of this job.If the notebook takes a parameter that is not specified, the default value from the notebook will be used. - :type base_parameters: dict[str, any] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, any]] + :vartype base_parameters: dict[str, any] + :ivar libraries: A list of libraries to be installed on the cluster that will execute the job. + :vartype libraries: list[dict[str, any]] """ _validation = { @@ -10915,6 +15215,33 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword notebook_path: Required. The absolute path of the notebook to be run in the Databricks + Workspace. This path must begin with a slash. Type: string (or Expression with resultType + string). + :paramtype notebook_path: any + :keyword base_parameters: Base parameters to be used for each run of this job.If the notebook + takes a parameter that is not specified, the default value from the notebook will be used. + :paramtype base_parameters: dict[str, any] + :keyword libraries: A list of libraries to be installed on the cluster that will execute the + job. + :paramtype libraries: list[dict[str, any]] + """ super(DatabricksNotebookActivity, self).__init__(**kwargs) self.type = 'DatabricksNotebook' # type: str self.notebook_path = kwargs['notebook_path'] @@ -10927,31 +15254,31 @@ class DatabricksSparkJarActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. 
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param main_class_name: Required. The full name of the class containing the main method to be + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar main_class_name: Required. The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library. Type: string (or Expression with resultType string). - :type main_class_name: any - :param parameters: Parameters that will be passed to the main method. - :type parameters: list[any] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, any]] + :vartype main_class_name: any + :ivar parameters: Parameters that will be passed to the main method. + :vartype parameters: list[any] + :ivar libraries: A list of libraries to be installed on the cluster that will execute the job. + :vartype libraries: list[dict[str, any]] """ _validation = { @@ -10978,6 +15305,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword main_class_name: Required. The full name of the class containing the main method to be + executed. This class must be contained in a JAR provided as a library. Type: string (or + Expression with resultType string). + :paramtype main_class_name: any + :keyword parameters: Parameters that will be passed to the main method. + :paramtype parameters: list[any] + :keyword libraries: A list of libraries to be installed on the cluster that will execute the + job. + :paramtype libraries: list[dict[str, any]] + """ super(DatabricksSparkJarActivity, self).__init__(**kwargs) self.type = 'DatabricksSparkJar' # type: str self.main_class_name = kwargs['main_class_name'] @@ -10990,30 +15343,30 @@ class DatabricksSparkPythonActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. 
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param python_file: Required. The URI of the Python file to be executed. DBFS paths are + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar python_file: Required. The URI of the Python file to be executed. DBFS paths are supported. Type: string (or Expression with resultType string). - :type python_file: any - :param parameters: Command line parameters that will be passed to the Python file. - :type parameters: list[any] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, any]] + :vartype python_file: any + :ivar parameters: Command line parameters that will be passed to the Python file. 
+ :vartype parameters: list[any] + :ivar libraries: A list of libraries to be installed on the cluster that will execute the job. + :vartype libraries: list[dict[str, any]] """ _validation = { @@ -11040,6 +15393,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword python_file: Required. The URI of the Python file to be executed. DBFS paths are + supported. Type: string (or Expression with resultType string). + :paramtype python_file: any + :keyword parameters: Command line parameters that will be passed to the Python file. + :paramtype parameters: list[any] + :keyword libraries: A list of libraries to be installed on the cluster that will execute the + job. + :paramtype libraries: list[dict[str, any]] + """ super(DatabricksSparkPythonActivity, self).__init__(**kwargs) self.type = 'DatabricksSparkPython' # type: str self.python_file = kwargs['python_file'] @@ -11055,15 +15433,15 @@ class DataFlow(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of data flow.Constant filled by server. - :type type: str - :param description: The description of the data flow. 
- :type description: str - :param annotations: List of tags that can be used for describing the data flow. - :type annotations: list[any] - :param folder: The folder that this data flow is in. If not specified, Data flow will appear at + :ivar type: Required. Type of data flow.Constant filled by server. + :vartype type: str + :ivar description: The description of the data flow. + :vartype description: str + :ivar annotations: List of tags that can be used for describing the data flow. + :vartype annotations: list[any] + :ivar folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :vartype folder: ~azure.mgmt.datafactory.models.DataFlowFolder """ _validation = { @@ -11085,6 +15463,15 @@ def __init__( self, **kwargs ): + """ + :keyword description: The description of the data flow. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the data flow. + :paramtype annotations: list[any] + :keyword folder: The folder that this data flow is in. If not specified, Data flow will appear + at the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DataFlowFolder + """ super(DataFlow, self).__init__(**kwargs) self.type = None # type: Optional[str] self.description = kwargs.get('description', None) @@ -11097,14 +15484,14 @@ class DataFlowDebugCommandPayload(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param stream_name: Required. The stream name which is used for preview. - :type stream_name: str - :param row_limits: Row limits for preview response. - :type row_limits: int - :param columns: Array of column names. - :type columns: list[str] - :param expression: The expression which is used for preview. - :type expression: str + :ivar stream_name: Required. The stream name which is used for preview. 
+ :vartype stream_name: str + :ivar row_limits: Row limits for preview response. + :vartype row_limits: int + :ivar columns: Array of column names. + :vartype columns: list[str] + :ivar expression: The expression which is used for preview. + :vartype expression: str """ _validation = { @@ -11122,6 +15509,16 @@ def __init__( self, **kwargs ): + """ + :keyword stream_name: Required. The stream name which is used for preview. + :paramtype stream_name: str + :keyword row_limits: Row limits for preview response. + :paramtype row_limits: int + :keyword columns: Array of column names. + :paramtype columns: list[str] + :keyword expression: The expression which is used for preview. + :paramtype expression: str + """ super(DataFlowDebugCommandPayload, self).__init__(**kwargs) self.stream_name = kwargs['stream_name'] self.row_limits = kwargs.get('row_limits', None) @@ -11132,13 +15529,13 @@ def __init__( class DataFlowDebugCommandRequest(msrest.serialization.Model): """Request body structure for data flow debug command. - :param session_id: The ID of data flow debug session. - :type session_id: str - :param command: The command type. Possible values include: "executePreviewQuery", + :ivar session_id: The ID of data flow debug session. + :vartype session_id: str + :ivar command: The command type. Possible values include: "executePreviewQuery", "executeStatisticsQuery", "executeExpressionQuery". - :type command: str or ~azure.mgmt.datafactory.models.DataFlowDebugCommandType - :param command_payload: The command payload object. - :type command_payload: ~azure.mgmt.datafactory.models.DataFlowDebugCommandPayload + :vartype command: str or ~azure.mgmt.datafactory.models.DataFlowDebugCommandType + :ivar command_payload: The command payload object. + :vartype command_payload: ~azure.mgmt.datafactory.models.DataFlowDebugCommandPayload """ _attribute_map = { @@ -11151,6 +15548,15 @@ def __init__( self, **kwargs ): + """ + :keyword session_id: The ID of data flow debug session. 
+ :paramtype session_id: str + :keyword command: The command type. Possible values include: "executePreviewQuery", + "executeStatisticsQuery", "executeExpressionQuery". + :paramtype command: str or ~azure.mgmt.datafactory.models.DataFlowDebugCommandType + :keyword command_payload: The command payload object. + :paramtype command_payload: ~azure.mgmt.datafactory.models.DataFlowDebugCommandPayload + """ super(DataFlowDebugCommandRequest, self).__init__(**kwargs) self.session_id = kwargs.get('session_id', None) self.command = kwargs.get('command', None) @@ -11160,10 +15566,10 @@ def __init__( class DataFlowDebugCommandResponse(msrest.serialization.Model): """Response body structure of data flow result for data preview, statistics or expression preview. - :param status: The run status of data preview, statistics or expression preview. - :type status: str - :param data: The result data of data preview, statistics or expression preview. - :type data: str + :ivar status: The run status of data preview, statistics or expression preview. + :vartype status: str + :ivar data: The result data of data preview, statistics or expression preview. + :vartype data: str """ _attribute_map = { @@ -11175,6 +15581,12 @@ def __init__( self, **kwargs ): + """ + :keyword status: The run status of data preview, statistics or expression preview. + :paramtype status: str + :keyword data: The result data of data preview, statistics or expression preview. + :paramtype data: str + """ super(DataFlowDebugCommandResponse, self).__init__(**kwargs) self.status = kwargs.get('status', None) self.data = kwargs.get('data', None) @@ -11183,23 +15595,23 @@ def __init__( class DataFlowDebugPackage(msrest.serialization.Model): """Request body structure for starting data flow debug session. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow: Data flow instance. - :type data_flow: ~azure.mgmt.datafactory.models.DataFlowDebugResource - :param data_flows: List of Data flows. - :type data_flows: list[~azure.mgmt.datafactory.models.DataFlowDebugResource] - :param datasets: List of datasets. - :type datasets: list[~azure.mgmt.datafactory.models.DatasetDebugResource] - :param linked_services: List of linked services. - :type linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceDebugResource] - :param staging: Staging info for debug session. - :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo - :param debug_settings: Data flow debug settings. - :type debug_settings: ~azure.mgmt.datafactory.models.DataFlowDebugPackageDebugSettings + :vartype additional_properties: dict[str, any] + :ivar session_id: The ID of data flow debug session. + :vartype session_id: str + :ivar data_flow: Data flow instance. + :vartype data_flow: ~azure.mgmt.datafactory.models.DataFlowDebugResource + :ivar data_flows: List of Data flows. + :vartype data_flows: list[~azure.mgmt.datafactory.models.DataFlowDebugResource] + :ivar datasets: List of datasets. + :vartype datasets: list[~azure.mgmt.datafactory.models.DatasetDebugResource] + :ivar linked_services: List of linked services. + :vartype linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceDebugResource] + :ivar staging: Staging info for debug session. + :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :ivar debug_settings: Data flow debug settings. + :vartype debug_settings: ~azure.mgmt.datafactory.models.DataFlowDebugPackageDebugSettings """ _attribute_map = { @@ -11217,6 +15629,25 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword session_id: The ID of data flow debug session. + :paramtype session_id: str + :keyword data_flow: Data flow instance. + :paramtype data_flow: ~azure.mgmt.datafactory.models.DataFlowDebugResource + :keyword data_flows: List of Data flows. + :paramtype data_flows: list[~azure.mgmt.datafactory.models.DataFlowDebugResource] + :keyword datasets: List of datasets. + :paramtype datasets: list[~azure.mgmt.datafactory.models.DatasetDebugResource] + :keyword linked_services: List of linked services. + :paramtype linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceDebugResource] + :keyword staging: Staging info for debug session. + :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :keyword debug_settings: Data flow debug settings. + :paramtype debug_settings: ~azure.mgmt.datafactory.models.DataFlowDebugPackageDebugSettings + """ super(DataFlowDebugPackage, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.session_id = kwargs.get('session_id', None) @@ -11231,12 +15662,12 @@ def __init__( class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): """Data flow debug settings. - :param source_settings: Source setting for data flow debug. - :type source_settings: list[~azure.mgmt.datafactory.models.DataFlowSourceSetting] - :param parameters: Data flow parameters. - :type parameters: dict[str, any] - :param dataset_parameters: Parameters for dataset. - :type dataset_parameters: any + :ivar source_settings: Source setting for data flow debug. + :vartype source_settings: list[~azure.mgmt.datafactory.models.DataFlowSourceSetting] + :ivar parameters: Data flow parameters. + :vartype parameters: dict[str, any] + :ivar dataset_parameters: Parameters for dataset. 
+ :vartype dataset_parameters: any """ _attribute_map = { @@ -11249,6 +15680,14 @@ def __init__( self, **kwargs ): + """ + :keyword source_settings: Source setting for data flow debug. + :paramtype source_settings: list[~azure.mgmt.datafactory.models.DataFlowSourceSetting] + :keyword parameters: Data flow parameters. + :paramtype parameters: dict[str, any] + :keyword dataset_parameters: Parameters for dataset. + :paramtype dataset_parameters: any + """ super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) self.source_settings = kwargs.get('source_settings', None) self.parameters = kwargs.get('parameters', None) @@ -11258,8 +15697,8 @@ def __init__( class SubResourceDebugResource(msrest.serialization.Model): """Azure Data Factory nested debug resource. - :param name: The resource name. - :type name: str + :ivar name: The resource name. + :vartype name: str """ _attribute_map = { @@ -11270,6 +15709,10 @@ def __init__( self, **kwargs ): + """ + :keyword name: The resource name. + :paramtype name: str + """ super(SubResourceDebugResource, self).__init__(**kwargs) self.name = kwargs.get('name', None) @@ -11279,10 +15722,10 @@ class DataFlowDebugResource(SubResourceDebugResource): All required parameters must be populated in order to send to Azure. - :param name: The resource name. - :type name: str - :param properties: Required. Data flow properties. - :type properties: ~azure.mgmt.datafactory.models.DataFlow + :ivar name: The resource name. + :vartype name: str + :ivar properties: Required. Data flow properties. + :vartype properties: ~azure.mgmt.datafactory.models.DataFlow """ _validation = { @@ -11298,6 +15741,12 @@ def __init__( self, **kwargs ): + """ + :keyword name: The resource name. + :paramtype name: str + :keyword properties: Required. Data flow properties. 
+ :paramtype properties: ~azure.mgmt.datafactory.models.DataFlow + """ super(DataFlowDebugResource, self).__init__(**kwargs) self.properties = kwargs['properties'] @@ -11305,27 +15754,27 @@ def __init__( class DataFlowDebugSessionInfo(msrest.serialization.Model): """Data flow debug session info. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param data_flow_name: The name of the data flow. - :type data_flow_name: str - :param compute_type: Compute type of the cluster. - :type compute_type: str - :param core_count: Core count of the cluster. - :type core_count: int - :param node_count: Node count of the cluster. (deprecated property). - :type node_count: int - :param integration_runtime_name: Attached integration runtime name of data flow debug session. - :type integration_runtime_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :param start_time: Start time of data flow debug session. - :type start_time: str - :param time_to_live_in_minutes: Compute type of the cluster. - :type time_to_live_in_minutes: int - :param last_activity_time: Last activity time of data flow debug session. - :type last_activity_time: str + :vartype additional_properties: dict[str, any] + :ivar data_flow_name: The name of the data flow. + :vartype data_flow_name: str + :ivar compute_type: Compute type of the cluster. + :vartype compute_type: str + :ivar core_count: Core count of the cluster. + :vartype core_count: int + :ivar node_count: Node count of the cluster. (deprecated property). + :vartype node_count: int + :ivar integration_runtime_name: Attached integration runtime name of data flow debug session. + :vartype integration_runtime_name: str + :ivar session_id: The ID of data flow debug session. 
+ :vartype session_id: str + :ivar start_time: Start time of data flow debug session. + :vartype start_time: str + :ivar time_to_live_in_minutes: Compute type of the cluster. + :vartype time_to_live_in_minutes: int + :ivar last_activity_time: Last activity time of data flow debug session. + :vartype last_activity_time: str """ _attribute_map = { @@ -11345,6 +15794,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword data_flow_name: The name of the data flow. + :paramtype data_flow_name: str + :keyword compute_type: Compute type of the cluster. + :paramtype compute_type: str + :keyword core_count: Core count of the cluster. + :paramtype core_count: int + :keyword node_count: Node count of the cluster. (deprecated property). + :paramtype node_count: int + :keyword integration_runtime_name: Attached integration runtime name of data flow debug + session. + :paramtype integration_runtime_name: str + :keyword session_id: The ID of data flow debug session. + :paramtype session_id: str + :keyword start_time: Start time of data flow debug session. + :paramtype start_time: str + :keyword time_to_live_in_minutes: Compute type of the cluster. + :paramtype time_to_live_in_minutes: int + :keyword last_activity_time: Last activity time of data flow debug session. + :paramtype last_activity_time: str + """ super(DataFlowDebugSessionInfo, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.data_flow_name = kwargs.get('data_flow_name', None) @@ -11361,8 +15834,8 @@ def __init__( class DataFlowFolder(msrest.serialization.Model): """The folder that this data flow is in. If not specified, Data flow will appear at the root level. - :param name: The name of the folder that this data flow is in. - :type name: str + :ivar name: The name of the folder that this data flow is in. 
+ :vartype name: str """ _attribute_map = { @@ -11373,6 +15846,10 @@ def __init__( self, **kwargs ): + """ + :keyword name: The name of the folder that this data flow is in. + :paramtype name: str + """ super(DataFlowFolder, self).__init__(**kwargs) self.name = kwargs.get('name', None) @@ -11382,10 +15859,10 @@ class DataFlowListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of data flows. - :type value: list[~azure.mgmt.datafactory.models.DataFlowResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of data flows. + :vartype value: list[~azure.mgmt.datafactory.models.DataFlowResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -11401,6 +15878,12 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. List of data flows. + :paramtype value: list[~azure.mgmt.datafactory.models.DataFlowResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(DataFlowListResponse, self).__init__(**kwargs) self.value = kwargs['value'] self.next_link = kwargs.get('next_link', None) @@ -11413,17 +15896,17 @@ class DataFlowReference(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar type: Data flow reference type. Has constant value: "DataFlowReference". :vartype type: str - :param reference_name: Required. Reference data flow name. 
- :type reference_name: str - :param dataset_parameters: Reference data flow parameters from dataset. - :type dataset_parameters: any - :param parameters: Data flow parameters. - :type parameters: dict[str, any] + :ivar reference_name: Required. Reference data flow name. + :vartype reference_name: str + :ivar dataset_parameters: Reference data flow parameters from dataset. + :vartype dataset_parameters: any + :ivar parameters: Data flow parameters. + :vartype parameters: dict[str, any] """ _validation = { @@ -11445,6 +15928,17 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword reference_name: Required. Reference data flow name. + :paramtype reference_name: str + :keyword dataset_parameters: Reference data flow parameters from dataset. + :paramtype dataset_parameters: any + :keyword parameters: Data flow parameters. + :paramtype parameters: dict[str, any] + """ super(DataFlowReference, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.reference_name = kwargs['reference_name'] @@ -11467,8 +15961,8 @@ class DataFlowResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Data flow properties. - :type properties: ~azure.mgmt.datafactory.models.DataFlow + :ivar properties: Required. Data flow properties. + :vartype properties: ~azure.mgmt.datafactory.models.DataFlow """ _validation = { @@ -11491,6 +15985,10 @@ def __init__( self, **kwargs ): + """ + :keyword properties: Required. Data flow properties. 
+ :paramtype properties: ~azure.mgmt.datafactory.models.DataFlow + """ super(DataFlowResource, self).__init__(**kwargs) self.properties = kwargs['properties'] @@ -11500,16 +15998,16 @@ class Transformation(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param linked_service: Linked service reference. - :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param flowlet: Flowlet Reference. - :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + :ivar name: Required. Transformation name. + :vartype name: str + :ivar description: Transformation description. + :vartype description: str + :ivar dataset: Dataset reference. + :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :ivar linked_service: Linked service reference. + :vartype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar flowlet: Flowlet Reference. + :vartype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference """ _validation = { @@ -11528,6 +16026,18 @@ def __init__( self, **kwargs ): + """ + :keyword name: Required. Transformation name. + :paramtype name: str + :keyword description: Transformation description. + :paramtype description: str + :keyword dataset: Dataset reference. + :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :keyword linked_service: Linked service reference. + :paramtype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword flowlet: Flowlet Reference. 
+ :paramtype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + """ super(Transformation, self).__init__(**kwargs) self.name = kwargs['name'] self.description = kwargs.get('description', None) @@ -11541,18 +16051,18 @@ class DataFlowSink(Transformation): All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param linked_service: Linked service reference. - :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param flowlet: Flowlet Reference. - :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference - :param schema_linked_service: Schema linked service reference. - :type schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar name: Required. Transformation name. + :vartype name: str + :ivar description: Transformation description. + :vartype description: str + :ivar dataset: Dataset reference. + :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :ivar linked_service: Linked service reference. + :vartype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar flowlet: Flowlet Reference. + :vartype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + :ivar schema_linked_service: Schema linked service reference. + :vartype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { @@ -11572,6 +16082,20 @@ def __init__( self, **kwargs ): + """ + :keyword name: Required. Transformation name. + :paramtype name: str + :keyword description: Transformation description. + :paramtype description: str + :keyword dataset: Dataset reference. + :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :keyword linked_service: Linked service reference. 
+ :paramtype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword flowlet: Flowlet Reference. + :paramtype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + :keyword schema_linked_service: Schema linked service reference. + :paramtype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + """ super(DataFlowSink, self).__init__(**kwargs) self.schema_linked_service = kwargs.get('schema_linked_service', None) @@ -11581,18 +16105,18 @@ class DataFlowSource(Transformation): All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param linked_service: Linked service reference. - :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param flowlet: Flowlet Reference. - :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference - :param schema_linked_service: Schema linked service reference. - :type schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar name: Required. Transformation name. + :vartype name: str + :ivar description: Transformation description. + :vartype description: str + :ivar dataset: Dataset reference. + :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :ivar linked_service: Linked service reference. + :vartype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar flowlet: Flowlet Reference. + :vartype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + :ivar schema_linked_service: Schema linked service reference. + :vartype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { @@ -11612,6 +16136,20 @@ def __init__( self, **kwargs ): + """ + :keyword name: Required. Transformation name. 
+ :paramtype name: str + :keyword description: Transformation description. + :paramtype description: str + :keyword dataset: Dataset reference. + :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :keyword linked_service: Linked service reference. + :paramtype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword flowlet: Flowlet Reference. + :paramtype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + :keyword schema_linked_service: Schema linked service reference. + :paramtype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + """ super(DataFlowSource, self).__init__(**kwargs) self.schema_linked_service = kwargs.get('schema_linked_service', None) @@ -11619,13 +16157,13 @@ def __init__( class DataFlowSourceSetting(msrest.serialization.Model): """Definition of data flow source setting for debug. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param source_name: The data flow source name. - :type source_name: str - :param row_limit: Defines the row limit of data flow source in debug. - :type row_limit: int + :vartype additional_properties: dict[str, any] + :ivar source_name: The data flow source name. + :vartype source_name: str + :ivar row_limit: Defines the row limit of data flow source in debug. + :vartype row_limit: int """ _attribute_map = { @@ -11638,6 +16176,15 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_name: The data flow source name. + :paramtype source_name: str + :keyword row_limit: Defines the row limit of data flow source in debug. 
+ :paramtype row_limit: int + """ super(DataFlowSourceSetting, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.source_name = kwargs.get('source_name', None) @@ -11647,11 +16194,11 @@ def __init__( class DataFlowStagingInfo(msrest.serialization.Model): """Staging info for execute data flow activity. - :param linked_service: Staging linked service reference. - :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType + :ivar linked_service: Staging linked service reference. + :vartype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar folder_path: Folder path for staging blob. Type: string (or Expression with resultType string). - :type folder_path: any + :vartype folder_path: any """ _attribute_map = { @@ -11663,6 +16210,13 @@ def __init__( self, **kwargs ): + """ + :keyword linked_service: Staging linked service reference. + :paramtype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword folder_path: Folder path for staging blob. Type: string (or Expression with resultType + string). + :paramtype folder_path: any + """ super(DataFlowStagingInfo, self).__init__(**kwargs) self.linked_service = kwargs.get('linked_service', None) self.folder_path = kwargs.get('folder_path', None) @@ -11673,43 +16227,43 @@ class DataLakeAnalyticsUSQLActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. 
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param script_path: Required. Case-sensitive path to folder that contains the U-SQL script. + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar script_path: Required. Case-sensitive path to folder that contains the U-SQL script. Type: string (or Expression with resultType string). - :type script_path: any - :param script_linked_service: Required. Script linked service reference. - :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. + :vartype script_path: any + :ivar script_linked_service: Required. Script linked service reference. 
+ :vartype script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. - :type degree_of_parallelism: any - :param priority: Determines which jobs out of all that are queued should be selected to run + :vartype degree_of_parallelism: any + :ivar priority: Determines which jobs out of all that are queued should be selected to run first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or Expression with resultType integer), minimum: 1. - :type priority: any - :param parameters: Parameters for U-SQL job request. - :type parameters: dict[str, any] - :param runtime_version: Runtime version of the U-SQL engine to use. Type: string (or Expression + :vartype priority: any + :ivar parameters: Parameters for U-SQL job request. + :vartype parameters: dict[str, any] + :ivar runtime_version: Runtime version of the U-SQL engine to use. Type: string (or Expression with resultType string). - :type runtime_version: any - :param compilation_mode: Compilation mode of U-SQL. Must be one of these values : Semantic, - Full and SingleBox. Type: string (or Expression with resultType string). - :type compilation_mode: any + :vartype runtime_version: any + :ivar compilation_mode: Compilation mode of U-SQL. Must be one of these values : Semantic, Full + and SingleBox. Type: string (or Expression with resultType string). + :vartype compilation_mode: any """ _validation = { @@ -11741,6 +16295,43 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. 
+ :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword script_path: Required. Case-sensitive path to folder that contains the U-SQL script. + Type: string (or Expression with resultType string). + :paramtype script_path: any + :keyword script_linked_service: Required. Script linked service reference. + :paramtype script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. + Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. + :paramtype degree_of_parallelism: any + :keyword priority: Determines which jobs out of all that are queued should be selected to run + first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or + Expression with resultType integer), minimum: 1. + :paramtype priority: any + :keyword parameters: Parameters for U-SQL job request. + :paramtype parameters: dict[str, any] + :keyword runtime_version: Runtime version of the U-SQL engine to use. Type: string (or + Expression with resultType string). + :paramtype runtime_version: any + :keyword compilation_mode: Compilation mode of U-SQL. Must be one of these values : Semantic, + Full and SingleBox. Type: string (or Expression with resultType string). 
+ :paramtype compilation_mode: any + """ super(DataLakeAnalyticsUSQLActivity, self).__init__(**kwargs) self.type = 'DataLakeAnalyticsU-SQL' # type: str self.script_path = kwargs['script_path'] @@ -11757,15 +16348,15 @@ class DatasetCompression(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset compression. Type: string (or Expression with resultType string). - :type type: any - :param level: The dataset compression level. Type: string (or Expression with resultType + :vartype type: any + :ivar level: The dataset compression level. Type: string (or Expression with resultType string). - :type level: any + :vartype level: any """ _validation = { @@ -11782,6 +16373,17 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword type: Required. Type of dataset compression. Type: string (or Expression with + resultType string). + :paramtype type: any + :keyword level: The dataset compression level. Type: string (or Expression with resultType + string). + :paramtype level: any + """ super(DatasetCompression, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = kwargs['type'] @@ -11791,10 +16393,10 @@ def __init__( class DatasetDataElement(msrest.serialization.Model): """Columns that define the structure of the dataset. - :param name: Name of the column. 
Type: string (or Expression with resultType string). - :type name: any - :param type: Type of the column. Type: string (or Expression with resultType string). - :type type: any + :ivar name: Name of the column. Type: string (or Expression with resultType string). + :vartype name: any + :ivar type: Type of the column. Type: string (or Expression with resultType string). + :vartype type: any """ _attribute_map = { @@ -11806,6 +16408,12 @@ def __init__( self, **kwargs ): + """ + :keyword name: Name of the column. Type: string (or Expression with resultType string). + :paramtype name: any + :keyword type: Type of the column. Type: string (or Expression with resultType string). + :paramtype type: any + """ super(DatasetDataElement, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.type = kwargs.get('type', None) @@ -11816,10 +16424,10 @@ class DatasetDebugResource(SubResourceDebugResource): All required parameters must be populated in order to send to Azure. - :param name: The resource name. - :type name: str - :param properties: Required. Dataset properties. - :type properties: ~azure.mgmt.datafactory.models.Dataset + :ivar name: The resource name. + :vartype name: str + :ivar properties: Required. Dataset properties. + :vartype properties: ~azure.mgmt.datafactory.models.Dataset """ _validation = { @@ -11835,6 +16443,12 @@ def __init__( self, **kwargs ): + """ + :keyword name: The resource name. + :paramtype name: str + :keyword properties: Required. Dataset properties. + :paramtype properties: ~azure.mgmt.datafactory.models.Dataset + """ super(DatasetDebugResource, self).__init__(**kwargs) self.properties = kwargs['properties'] @@ -11842,8 +16456,8 @@ def __init__( class DatasetFolder(msrest.serialization.Model): """The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :param name: The name of the folder that this Dataset is in. - :type name: str + :ivar name: The name of the folder that this Dataset is in. 
+ :vartype name: str """ _attribute_map = { @@ -11854,6 +16468,10 @@ def __init__( self, **kwargs ): + """ + :keyword name: The name of the folder that this Dataset is in. + :paramtype name: str + """ super(DatasetFolder, self).__init__(**kwargs) self.name = kwargs.get('name', None) @@ -11863,10 +16481,10 @@ class DatasetListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of datasets. - :type value: list[~azure.mgmt.datafactory.models.DatasetResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of datasets. + :vartype value: list[~azure.mgmt.datafactory.models.DatasetResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -11882,6 +16500,12 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. List of datasets. + :paramtype value: list[~azure.mgmt.datafactory.models.DatasetResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(DatasetListResponse, self).__init__(**kwargs) self.value = kwargs['value'] self.next_link = kwargs.get('next_link', None) @@ -11896,10 +16520,10 @@ class DatasetReference(msrest.serialization.Model): :ivar type: Dataset reference type. Has constant value: "DatasetReference". :vartype type: str - :param reference_name: Required. Reference dataset name. - :type reference_name: str - :param parameters: Arguments for dataset. - :type parameters: dict[str, any] + :ivar reference_name: Required. Reference dataset name. + :vartype reference_name: str + :ivar parameters: Arguments for dataset. + :vartype parameters: dict[str, any] """ _validation = { @@ -11919,6 +16543,12 @@ def __init__( self, **kwargs ): + """ + :keyword reference_name: Required. Reference dataset name. 
+ :paramtype reference_name: str + :keyword parameters: Arguments for dataset. + :paramtype parameters: dict[str, any] + """ super(DatasetReference, self).__init__(**kwargs) self.reference_name = kwargs['reference_name'] self.parameters = kwargs.get('parameters', None) @@ -11939,8 +16569,8 @@ class DatasetResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Dataset properties. - :type properties: ~azure.mgmt.datafactory.models.Dataset + :ivar properties: Required. Dataset properties. + :vartype properties: ~azure.mgmt.datafactory.models.Dataset """ _validation = { @@ -11963,6 +16593,10 @@ def __init__( self, **kwargs ): + """ + :keyword properties: Required. Dataset properties. + :paramtype properties: ~azure.mgmt.datafactory.models.Dataset + """ super(DatasetResource, self).__init__(**kwargs) self.properties = kwargs['properties'] @@ -11970,13 +16604,13 @@ def __init__( class DatasetSchemaDataElement(msrest.serialization.Model): """Columns that define the physical type schema of the dataset. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Name of the schema column. Type: string (or Expression with resultType string). - :type name: any - :param type: Type of the schema column. Type: string (or Expression with resultType string). - :type type: any + :vartype additional_properties: dict[str, any] + :ivar name: Name of the schema column. Type: string (or Expression with resultType string). + :vartype name: any + :ivar type: Type of the schema column. Type: string (or Expression with resultType string). 
+ :vartype type: any """ _attribute_map = { @@ -11989,6 +16623,15 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Name of the schema column. Type: string (or Expression with resultType string). + :paramtype name: any + :keyword type: Type of the schema column. Type: string (or Expression with resultType string). + :paramtype type: any + """ super(DatasetSchemaDataElement, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.name = kwargs.get('name', None) @@ -12000,48 +16643,48 @@ class Db2LinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: The connection string. It is mutually exclusive with server, - database, authenticationType, userName, packageCollection and certificateCommonName property. - Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param server: Server name for connection. 
It is mutually exclusive with connectionString + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: The connection string. It is mutually exclusive with server, database, + authenticationType, userName, packageCollection and certificateCommonName property. Type: + string, SecureString or AzureKeyVaultSecretReference. + :vartype connection_string: any + :ivar server: Server name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type server: any - :param database: Database name for connection. It is mutually exclusive with connectionString + :vartype server: any + :ivar database: Database name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type database: any - :param authentication_type: AuthenticationType to be used for connection. It is mutually + :vartype database: any + :ivar authentication_type: AuthenticationType to be used for connection. It is mutually exclusive with connectionString property. Possible values include: "Basic". - :type authentication_type: str or ~azure.mgmt.datafactory.models.Db2AuthenticationType - :param username: Username for authentication. 
It is mutually exclusive with connectionString + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.Db2AuthenticationType + :ivar username: Username for authentication. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type username: any - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param package_collection: Under where packages are created when querying database. It is + :vartype username: any + :ivar password: Password for authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar package_collection: Under where packages are created when querying database. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type package_collection: any - :param certificate_common_name: Certificate Common Name when TLS is enabled. It is mutually + :vartype package_collection: any + :ivar certificate_common_name: Certificate Common Name when TLS is enabled. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type certificate_common_name: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype certificate_common_name: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -12070,6 +16713,48 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: The connection string. It is mutually exclusive with server, + database, authenticationType, userName, packageCollection and certificateCommonName property. + Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword server: Server name for connection. It is mutually exclusive with connectionString + property. Type: string (or Expression with resultType string). + :paramtype server: any + :keyword database: Database name for connection. It is mutually exclusive with connectionString + property. Type: string (or Expression with resultType string). + :paramtype database: any + :keyword authentication_type: AuthenticationType to be used for connection. It is mutually + exclusive with connectionString property. Possible values include: "Basic". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.Db2AuthenticationType + :keyword username: Username for authentication. It is mutually exclusive with connectionString + property. Type: string (or Expression with resultType string). + :paramtype username: any + :keyword password: Password for authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword package_collection: Under where packages are created when querying database. It is + mutually exclusive with connectionString property. Type: string (or Expression with resultType + string). 
+ :paramtype package_collection: any + :keyword certificate_common_name: Certificate Common Name when TLS is enabled. It is mutually + exclusive with connectionString property. Type: string (or Expression with resultType string). + :paramtype certificate_common_name: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. It is mutually exclusive with + connectionString property. Type: string (or Expression with resultType string). + :paramtype encrypted_credential: any + """ super(Db2LinkedService, self).__init__(**kwargs) self.type = 'Db2' # type: str self.connection_string = kwargs.get('connection_string', None) @@ -12088,31 +16773,31 @@ class Db2Source(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any + :vartype additional_columns: any + :ivar query: Database query. Type: string (or Expression with resultType string). + :vartype query: any """ _validation = { @@ -12135,6 +16820,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: any + """ super(Db2Source, self).__init__(**kwargs) self.type = 'Db2Source' # type: str self.query = kwargs.get('query', None) @@ -12145,36 +16855,36 @@ class Db2TableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. 
Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. 
- :type table_name: any - :param schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with + :vartype table_name: any + :ivar schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The Db2 table name. Type: string (or Expression with resultType string). - :type table: any + :vartype schema_type_properties_schema: any + :ivar table: The Db2 table name. Type: string (or Expression with resultType string). + :vartype table: any """ _validation = { @@ -12201,6 +16911,36 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. 
+ :paramtype table_name: any + :keyword schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with + resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The Db2 table name. Type: string (or Expression with resultType string). + :paramtype table: any + """ super(Db2TableDataset, self).__init__(**kwargs) self.type = 'Db2Table' # type: str self.table_name = kwargs.get('table_name', None) @@ -12213,39 +16953,39 @@ class DeleteActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param recursive: If true, files or sub-folders under current folder path will be deleted + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. 
+ :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar recursive: If true, files or sub-folders under current folder path will be deleted recursively. Default is false. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param max_concurrent_connections: The max concurrent connections to connect data source at the + :vartype recursive: any + :ivar max_concurrent_connections: The max concurrent connections to connect data source at the same time. - :type max_concurrent_connections: int - :param enable_logging: Whether to record detailed logs of delete-activity execution. Default + :vartype max_concurrent_connections: int + :ivar enable_logging: Whether to record detailed logs of delete-activity execution. Default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_logging: any - :param log_storage_settings: Log storage settings customer need to provide when enableLogging - is true. - :type log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings - :param dataset: Required. Delete activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param store_settings: Delete activity store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :vartype enable_logging: any + :ivar log_storage_settings: Log storage settings customer need to provide when enableLogging is + true. + :vartype log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings + :ivar dataset: Required. Delete activity dataset reference. 
+ :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :ivar store_settings: Delete activity store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings """ _validation = { @@ -12276,6 +17016,39 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword recursive: If true, files or sub-folders under current folder path will be deleted + recursively. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword max_concurrent_connections: The max concurrent connections to connect data source at + the same time. + :paramtype max_concurrent_connections: int + :keyword enable_logging: Whether to record detailed logs of delete-activity execution. Default + value is false. Type: boolean (or Expression with resultType boolean). + :paramtype enable_logging: any + :keyword log_storage_settings: Log storage settings customer need to provide when enableLogging + is true. + :paramtype log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings + :keyword dataset: Required. Delete activity dataset reference. 
+ :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :keyword store_settings: Delete activity store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ super(DeleteActivity, self).__init__(**kwargs) self.type = 'Delete' # type: str self.recursive = kwargs.get('recursive', None) @@ -12289,8 +17062,8 @@ def __init__( class DeleteDataFlowDebugSessionRequest(msrest.serialization.Model): """Request body structure for deleting data flow debug session. - :param session_id: The ID of data flow debug session. - :type session_id: str + :ivar session_id: The ID of data flow debug session. + :vartype session_id: str """ _attribute_map = { @@ -12301,6 +17074,10 @@ def __init__( self, **kwargs ): + """ + :keyword session_id: The ID of data flow debug session. + :paramtype session_id: str + """ super(DeleteDataFlowDebugSessionRequest, self).__init__(**kwargs) self.session_id = kwargs.get('session_id', None) @@ -12310,56 +17087,56 @@ class DelimitedTextDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the delimited text storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param column_delimiter: The column delimiter. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar location: The location of the delimited text storage. + :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation + :ivar column_delimiter: The column delimiter. Type: string (or Expression with resultType string). - :type column_delimiter: any - :param row_delimiter: The row delimiter. 
Type: string (or Expression with resultType string). - :type row_delimiter: any - :param encoding_name: The code page name of the preferred encoding. If miss, the default value + :vartype column_delimiter: any + :ivar row_delimiter: The row delimiter. Type: string (or Expression with resultType string). + :vartype row_delimiter: any + :ivar encoding_name: The code page name of the preferred encoding. If miss, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :type encoding_name: any - :param compression_codec: The data compressionCodec. Type: string (or Expression with - resultType string). - :type compression_codec: any - :param compression_level: The data compression method used for DelimitedText. - :type compression_level: any - :param quote_char: The quote character. Type: string (or Expression with resultType string). - :type quote_char: any - :param escape_char: The escape character. Type: string (or Expression with resultType string). - :type escape_char: any - :param first_row_as_header: When used as input, treat the first row of data as headers. When + :vartype encoding_name: any + :ivar compression_codec: The data compressionCodec. Type: string (or Expression with resultType + string). + :vartype compression_codec: any + :ivar compression_level: The data compression method used for DelimitedText. + :vartype compression_level: any + :ivar quote_char: The quote character. Type: string (or Expression with resultType string). + :vartype quote_char: any + :ivar escape_char: The escape character. Type: string (or Expression with resultType string). + :vartype escape_char: any + :ivar first_row_as_header: When used as input, treat the first row of data as headers. 
When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). - :type first_row_as_header: any - :param null_value: The null value string. Type: string (or Expression with resultType string). - :type null_value: any + :vartype first_row_as_header: any + :ivar null_value: The null value string. Type: string (or Expression with resultType string). + :vartype null_value: any """ _validation = { @@ -12393,6 +17170,58 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword location: The location of the delimited text storage. + :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation + :keyword column_delimiter: The column delimiter. 
Type: string (or Expression with resultType + string). + :paramtype column_delimiter: any + :keyword row_delimiter: The row delimiter. Type: string (or Expression with resultType string). + :paramtype row_delimiter: any + :keyword encoding_name: The code page name of the preferred encoding. If miss, the default + value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the + table in the following link to set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). + :paramtype encoding_name: any + :keyword compression_codec: The data compressionCodec. Type: string (or Expression with + resultType string). + :paramtype compression_codec: any + :keyword compression_level: The data compression method used for DelimitedText. + :paramtype compression_level: any + :keyword quote_char: The quote character. Type: string (or Expression with resultType string). + :paramtype quote_char: any + :keyword escape_char: The escape character. Type: string (or Expression with resultType + string). + :paramtype escape_char: any + :keyword first_row_as_header: When used as input, treat the first row of data as headers. When + used as output,write the headers into the output as the first row of data. The default value is + false. Type: boolean (or Expression with resultType boolean). + :paramtype first_row_as_header: any + :keyword null_value: The null value string. Type: string (or Expression with resultType + string). + :paramtype null_value: any + """ super(DelimitedTextDataset, self).__init__(**kwargs) self.type = 'DelimitedText' # type: str self.location = kwargs.get('location', None) @@ -12412,16 +17241,16 @@ class DelimitedTextReadSettings(FormatReadSettings): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param skip_line_count: Indicates the number of non-empty rows to skip when reading data from + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar skip_line_count: Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with resultType integer). - :type skip_line_count: any - :param compression_properties: Compression settings. - :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + :vartype skip_line_count: any + :ivar compression_properties: Compression settings. + :vartype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ _validation = { @@ -12439,6 +17268,16 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword skip_line_count: Indicates the number of non-empty rows to skip when reading data from + input files. Type: integer (or Expression with resultType integer). + :paramtype skip_line_count: any + :keyword compression_properties: Compression settings. + :paramtype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + """ super(DelimitedTextReadSettings, self).__init__(**kwargs) self.type = 'DelimitedTextReadSettings' # type: str self.skip_line_count = kwargs.get('skip_line_count', None) @@ -12450,33 +17289,33 @@ class DelimitedTextSink(CopySink): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: DelimitedText format settings. - :type format_settings: ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings + :vartype disable_metrics_collection: any + :ivar store_settings: DelimitedText store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :ivar format_settings: DelimitedText format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings """ _validation = { @@ -12500,6 +17339,33 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: DelimitedText store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :keyword format_settings: DelimitedText format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings + """ super(DelimitedTextSink, self).__init__(**kwargs) self.type = 'DelimitedTextSink' # type: str self.store_settings = kwargs.get('store_settings', None) @@ -12511,30 +17377,30 @@ class DelimitedTextSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: DelimitedText format settings. - :type format_settings: ~azure.mgmt.datafactory.models.DelimitedTextReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar store_settings: DelimitedText store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar format_settings: DelimitedText format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.DelimitedTextReadSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -12557,6 +17423,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: DelimitedText store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword format_settings: DelimitedText format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.DelimitedTextReadSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(DelimitedTextSource, self).__init__(**kwargs) self.type = 'DelimitedTextSource' # type: str self.store_settings = kwargs.get('store_settings', None) @@ -12569,24 +17459,24 @@ class DelimitedTextWriteSettings(FormatWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. 
- :type type: str - :param quote_all_text: Indicates whether string values should always be enclosed with quotes. + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar quote_all_text: Indicates whether string values should always be enclosed with quotes. Type: boolean (or Expression with resultType boolean). - :type quote_all_text: any - :param file_extension: Required. The file extension used to create the files. Type: string (or + :vartype quote_all_text: any + :ivar file_extension: Required. The file extension used to create the files. Type: string (or Expression with resultType string). - :type file_extension: any - :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + :vartype file_extension: any + :ivar max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :type max_rows_per_file: any - :param file_name_prefix: Specifies the file name pattern + :vartype max_rows_per_file: any + :ivar file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). - :type file_name_prefix: any + :vartype file_name_prefix: any """ _validation = { @@ -12607,6 +17497,24 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword quote_all_text: Indicates whether string values should always be enclosed with quotes. + Type: boolean (or Expression with resultType boolean). + :paramtype quote_all_text: any + :keyword file_extension: Required. The file extension used to create the files. Type: string + (or Expression with resultType string). 
+ :paramtype file_extension: any + :keyword max_rows_per_file: Limit the written file's row count to be smaller than or equal to + the specified count. Type: integer (or Expression with resultType integer). + :paramtype max_rows_per_file: any + :keyword file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :paramtype file_name_prefix: any + """ super(DelimitedTextWriteSettings, self).__init__(**kwargs) self.type = 'DelimitedTextWriteSettings' # type: str self.quote_all_text = kwargs.get('quote_all_text', None) @@ -12623,8 +17531,8 @@ class DependencyReference(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. The type of dependency reference.Constant filled by server. - :type type: str + :ivar type: Required. The type of dependency reference.Constant filled by server. + :vartype type: str """ _validation = { @@ -12643,6 +17551,8 @@ def __init__( self, **kwargs ): + """ + """ super(DependencyReference, self).__init__(**kwargs) self.type = None # type: Optional[str] @@ -12652,16 +17562,16 @@ class DistcpSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param resource_manager_endpoint: Required. Specifies the Yarn ResourceManager endpoint. Type: + :ivar resource_manager_endpoint: Required. Specifies the Yarn ResourceManager endpoint. Type: string (or Expression with resultType string). - :type resource_manager_endpoint: any - :param temp_script_path: Required. Specifies an existing folder path which will be used to - store temp Distcp command script. The script file is generated by ADF and will be removed after - Copy job finished. Type: string (or Expression with resultType string). - :type temp_script_path: any - :param distcp_options: Specifies the Distcp options. 
Type: string (or Expression with - resultType string). - :type distcp_options: any + :vartype resource_manager_endpoint: any + :ivar temp_script_path: Required. Specifies an existing folder path which will be used to store + temp Distcp command script. The script file is generated by ADF and will be removed after Copy + job finished. Type: string (or Expression with resultType string). + :vartype temp_script_path: any + :ivar distcp_options: Specifies the Distcp options. Type: string (or Expression with resultType + string). + :vartype distcp_options: any """ _validation = { @@ -12679,6 +17589,18 @@ def __init__( self, **kwargs ): + """ + :keyword resource_manager_endpoint: Required. Specifies the Yarn ResourceManager endpoint. + Type: string (or Expression with resultType string). + :paramtype resource_manager_endpoint: any + :keyword temp_script_path: Required. Specifies an existing folder path which will be used to + store temp Distcp command script. The script file is generated by ADF and will be removed after + Copy job finished. Type: string (or Expression with resultType string). + :paramtype temp_script_path: any + :keyword distcp_options: Specifies the Distcp options. Type: string (or Expression with + resultType string). + :paramtype distcp_options: any + """ super(DistcpSettings, self).__init__(**kwargs) self.resource_manager_endpoint = kwargs['resource_manager_endpoint'] self.temp_script_path = kwargs['temp_script_path'] @@ -12690,31 +17612,31 @@ class DocumentDbCollectionDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param collection_name: Required. Document Database collection name. 
Type: string (or - Expression with resultType string). - :type collection_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar collection_name: Required. Document Database collection name. Type: string (or Expression + with resultType string). + :vartype collection_name: any """ _validation = { @@ -12740,6 +17662,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword collection_name: Required. Document Database collection name. Type: string (or + Expression with resultType string). 
+ :paramtype collection_name: any + """ super(DocumentDbCollectionDataset, self).__init__(**kwargs) self.type = 'DocumentDbCollection' # type: str self.collection_name = kwargs['collection_name'] @@ -12750,35 +17697,35 @@ class DocumentDbCollectionSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or + :vartype disable_metrics_collection: any + :ivar nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). - :type nesting_separator: any - :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or + :vartype nesting_separator: any + :ivar write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. - :type write_behavior: any + :vartype write_behavior: any """ _validation = { @@ -12802,6 +17749,35 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword nesting_separator: Nested properties separator. Default is . (dot). Type: string (or + Expression with resultType string). + :paramtype nesting_separator: any + :keyword write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or + Expression with resultType string). Allowed values: insert and upsert. + :paramtype write_behavior: any + """ super(DocumentDbCollectionSink, self).__init__(**kwargs) self.type = 'DocumentDbCollectionSink' # type: str self.nesting_separator = kwargs.get('nesting_separator', None) @@ -12813,34 +17789,34 @@ class DocumentDbCollectionSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. 
+ :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: Documents query. Type: string (or Expression with resultType string). - :type query: any - :param nesting_separator: Nested properties separator. Type: string (or Expression with + :vartype disable_metrics_collection: any + :ivar query: Documents query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar nesting_separator: Nested properties separator. Type: string (or Expression with resultType string). - :type nesting_separator: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype nesting_separator: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -12864,6 +17840,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: Documents query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword nesting_separator: Nested properties separator. Type: string (or Expression with + resultType string). + :paramtype nesting_separator: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(DocumentDbCollectionSource, self).__init__(**kwargs) self.type = 'DocumentDbCollectionSource' # type: str self.query = kwargs.get('query', None) @@ -12877,28 +17881,28 @@ class DrillLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: An ODBC connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: An ODBC connection string. 
Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar pwd: The Azure key vault secret reference of password in connection string. + :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -12921,6 +17925,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword pwd: The Azure key vault secret reference of password in connection string. + :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. 
Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(DrillLinkedService, self).__init__(**kwargs) self.type = 'Drill' # type: str self.connection_string = kwargs.get('connection_string', None) @@ -12933,32 +17959,32 @@ class DrillSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -12981,6 +18007,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(DrillSource, self).__init__(**kwargs) self.type = 'DrillSource' # type: str self.query = kwargs.get('query', None) @@ -12991,36 +18043,36 @@ class DrillTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The table name of the Drill. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The schema name of the Drill. Type: string (or Expression + :vartype table_name: any + :ivar table: The table name of the Drill. 
Type: string (or Expression with resultType string). + :vartype table: any + :ivar schema_type_properties_schema: The schema name of the Drill. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -13047,6 +18099,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of the Drill. Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of the Drill. Type: string (or + Expression with resultType string). 
+ :paramtype schema_type_properties_schema: any + """ super(DrillTableDataset, self).__init__(**kwargs) self.type = 'DrillTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -13057,11 +18140,11 @@ def __init__( class DWCopyCommandDefaultValue(msrest.serialization.Model): """Default value. - :param column_name: Column name. Type: object (or Expression with resultType string). - :type column_name: any - :param default_value: The default value of the column. Type: object (or Expression with + :ivar column_name: Column name. Type: object (or Expression with resultType string). + :vartype column_name: any + :ivar default_value: The default value of the column. Type: object (or Expression with resultType string). - :type default_value: any + :vartype default_value: any """ _attribute_map = { @@ -13073,6 +18156,13 @@ def __init__( self, **kwargs ): + """ + :keyword column_name: Column name. Type: object (or Expression with resultType string). + :paramtype column_name: any + :keyword default_value: The default value of the column. Type: object (or Expression with + resultType string). + :paramtype default_value: any + """ super(DWCopyCommandDefaultValue, self).__init__(**kwargs) self.column_name = kwargs.get('column_name', None) self.default_value = kwargs.get('default_value', None) @@ -13081,15 +18171,15 @@ def __init__( class DWCopyCommandSettings(msrest.serialization.Model): """DW Copy Command settings. - :param default_values: Specifies the default values for each target column in SQL DW. The + :ivar default_values: Specifies the default values for each target column in SQL DW. The default values in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default value. Type: array of objects (or Expression with resultType array of objects). 
- :type default_values: list[~azure.mgmt.datafactory.models.DWCopyCommandDefaultValue] - :param additional_options: Additional options directly passed to SQL DW in Copy Command. Type: + :vartype default_values: list[~azure.mgmt.datafactory.models.DWCopyCommandDefaultValue] + :ivar additional_options: Additional options directly passed to SQL DW in Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. - :type additional_options: dict[str, str] + :vartype additional_options: dict[str, str] """ _attribute_map = { @@ -13101,6 +18191,17 @@ def __init__( self, **kwargs ): + """ + :keyword default_values: Specifies the default values for each target column in SQL DW. The + default values in the property overwrite the DEFAULT constraint set in the DB, and identity + column cannot have a default value. Type: array of objects (or Expression with resultType array + of objects). + :paramtype default_values: list[~azure.mgmt.datafactory.models.DWCopyCommandDefaultValue] + :keyword additional_options: Additional options directly passed to SQL DW in Copy Command. + Type: key value pairs (value should be string type) (or Expression with resultType object). + Example: "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. + :paramtype additional_options: dict[str, str] + """ super(DWCopyCommandSettings, self).__init__(**kwargs) self.default_values = kwargs.get('default_values', None) self.additional_options = kwargs.get('additional_options', None) @@ -13111,40 +18212,40 @@ class DynamicsAXLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData endpoint. - :type url: any - :param service_principal_id: Required. Specify the application's client ID. Type: string (or + :vartype url: any + :ivar service_principal_id: Required. Specify the application's client ID. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: Required. Specify the application's key. Mark this field as a + :vartype service_principal_id: any + :ivar service_principal_key: Required. Specify the application's key. 
Mark this field as a SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key Vault. Type: string (or Expression with resultType string). - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. Specify the tenant information (domain name or tenant ID) under which + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: Required. Specify the tenant information (domain name or tenant ID) under which your application resides. Retrieve it by hovering the mouse in the top-right corner of the Azure portal. Type: string (or Expression with resultType string). - :type tenant: any - :param aad_resource_id: Required. Specify the resource you are requesting authorization. Type: + :vartype tenant: any + :ivar aad_resource_id: Required. Specify the resource you are requesting authorization. Type: string (or Expression with resultType string). - :type aad_resource_id: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype aad_resource_id: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -13175,6 +18276,40 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData + endpoint. + :paramtype url: any + :keyword service_principal_id: Required. Specify the application's client ID. Type: string (or + Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: Required. Specify the application's key. Mark this field as a + SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key + Vault. Type: string (or Expression with resultType string). + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: Required. Specify the tenant information (domain name or tenant ID) under + which your application resides. Retrieve it by hovering the mouse in the top-right corner of + the Azure portal. Type: string (or Expression with resultType string). + :paramtype tenant: any + :keyword aad_resource_id: Required. Specify the resource you are requesting authorization. + Type: string (or Expression with resultType string). + :paramtype aad_resource_id: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(DynamicsAXLinkedService, self).__init__(**kwargs) self.type = 'DynamicsAX' # type: str self.url = kwargs['url'] @@ -13190,31 +18325,31 @@ class DynamicsAXResourceDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). - :type path: any + :vartype path: any """ _validation = { @@ -13240,6 +18375,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression + with resultType string). + :paramtype path: any + """ super(DynamicsAXResourceDataset, self).__init__(**kwargs) self.type = 'DynamicsAXResource' # type: str self.path = kwargs['path'] @@ -13250,37 +18410,37 @@ class DynamicsAXSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + :vartype query: any + :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: any + :vartype http_request_timeout: any """ _validation = { @@ -13304,6 +18464,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the + timeout to get a response, not the timeout to read response data. Default value: 00:05:00. + Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype http_request_timeout: any + """ super(DynamicsAXSource, self).__init__(**kwargs) self.type = 'DynamicsAXSource' # type: str self.query = kwargs.get('query', None) @@ -13315,31 +18506,31 @@ class DynamicsCrmEntityDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param entity_name: The logical name of the entity. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar entity_name: The logical name of the entity. Type: string (or Expression with resultType string). - :type entity_name: any + :vartype entity_name: any """ _validation = { @@ -13364,6 +18555,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword entity_name: The logical name of the entity. Type: string (or Expression with + resultType string). + :paramtype entity_name: any + """ super(DynamicsCrmEntityDataset, self).__init__(**kwargs) self.type = 'DynamicsCrmEntity' # type: str self.entity_name = kwargs.get('entity_name', None) @@ -13374,65 +18590,64 @@ class DynamicsCrmLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' - for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: - string (or Expression with resultType string). - :type deployment_type: any - :param host_name: The host name of the on-premises Dynamics CRM server. The property is - required for on-prem and not allowed for online. Type: string (or Expression with resultType - string). - :type host_name: any - :param port: The port of on-premises Dynamics CRM server. The property is required for on-prem + :vartype additional_properties: dict[str, any] + :ivar type: Required. 
Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' for + Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: string + (or Expression with resultType string). + :vartype deployment_type: any + :ivar host_name: The host name of the on-premises Dynamics CRM server. The property is required + for on-prem and not allowed for online. Type: string (or Expression with resultType string). + :vartype host_name: any + :ivar port: The port of on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: any - :param service_uri: The URL to the Microsoft Dynamics CRM server. The property is required for + :vartype port: any + :ivar service_uri: The URL to the Microsoft Dynamics CRM server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: any - :param organization_name: The organization name of the Dynamics CRM instance. The property is + :vartype service_uri: any + :ivar organization_name: The organization name of the Dynamics CRM instance. The property is required for on-prem and required for online when there are more than one Dynamics CRM instances associated with the user. Type: string (or Expression with resultType string). 
- :type organization_name: any - :param authentication_type: Required. The authentication type to connect to Dynamics CRM - server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, - 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). - :type authentication_type: any - :param username: User name to access the Dynamics CRM instance. Type: string (or Expression - with resultType string). - :type username: any - :param password: Password to access the Dynamics CRM instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_credential_type: The service principal credential type to use in + :vartype organization_name: any + :ivar authentication_type: Required. The authentication type to connect to Dynamics CRM server. + 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' + for Server-To-Server authentication in online scenario. Type: string (or Expression with + resultType string). + :vartype authentication_type: any + :ivar username: User name to access the Dynamics CRM instance. Type: string (or Expression with + resultType string). + :vartype username: any + :ivar password: Password to access the Dynamics CRM instance. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_principal_id: The client ID of the application in Azure Active Directory used for + Server-To-Server authentication. Type: string (or Expression with resultType string). + :vartype service_principal_id: any + :ivar service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 
'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). - :type service_principal_credential_type: any - :param service_principal_credential: The credential of the service principal object in Azure + :vartype service_principal_credential_type: any + :ivar service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -13466,6 +18681,65 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. 
+ :paramtype annotations: list[any] + :keyword deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' + for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: + string (or Expression with resultType string). + :paramtype deployment_type: any + :keyword host_name: The host name of the on-premises Dynamics CRM server. The property is + required for on-prem and not allowed for online. Type: string (or Expression with resultType + string). + :paramtype host_name: any + :keyword port: The port of on-premises Dynamics CRM server. The property is required for + on-prem and not allowed for online. Default is 443. Type: integer (or Expression with + resultType integer), minimum: 0. + :paramtype port: any + :keyword service_uri: The URL to the Microsoft Dynamics CRM server. The property is required + for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). + :paramtype service_uri: any + :keyword organization_name: The organization name of the Dynamics CRM instance. The property is + required for on-prem and required for online when there are more than one Dynamics CRM + instances associated with the user. Type: string (or Expression with resultType string). + :paramtype organization_name: any + :keyword authentication_type: Required. The authentication type to connect to Dynamics CRM + server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, + 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or + Expression with resultType string). + :paramtype authentication_type: any + :keyword username: User name to access the Dynamics CRM instance. Type: string (or Expression + with resultType string). + :paramtype username: any + :keyword password: Password to access the Dynamics CRM instance. 
+ :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). + :paramtype service_principal_credential_type: any + :keyword service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :paramtype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(DynamicsCrmLinkedService, self).__init__(**kwargs) self.type = 'DynamicsCrm' # type: str self.deployment_type = kwargs['deployment_type'] @@ -13487,39 +18761,39 @@ class DynamicsCrmSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. 
Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: Required. The write behavior for the operation. 
Possible values include: + :vartype disable_metrics_collection: any + :ivar write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". - :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior - :param ignore_null_values: The flag indicating whether to ignore null values from input dataset + :vartype write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior + :ivar ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: any - :param alternate_key_name: The logical name of the alternate key which will be used when + :vartype ignore_null_values: any + :ivar alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). - :type alternate_key_name: any + :vartype alternate_key_name: any """ _validation = { @@ -13545,6 +18819,39 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: Required. The write behavior for the operation. Possible values + include: "Upsert". + :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior + :keyword ignore_null_values: The flag indicating whether to ignore null values from input + dataset (except key fields) during write operation. Default is false. Type: boolean (or + Expression with resultType boolean). + :paramtype ignore_null_values: any + :keyword alternate_key_name: The logical name of the alternate key which will be used when + upserting records. Type: string (or Expression with resultType string). + :paramtype alternate_key_name: any + """ super(DynamicsCrmSink, self).__init__(**kwargs) self.type = 'DynamicsCrmSink' # type: str self.write_behavior = kwargs['write_behavior'] @@ -13557,29 +18864,29 @@ class DynamicsCrmSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. 
+ :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM + :vartype disable_metrics_collection: any + :ivar query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). - :type query: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -13601,6 +18908,29 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM + (online & on-premises). Type: string (or Expression with resultType string). + :paramtype query: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(DynamicsCrmSource, self).__init__(**kwargs) self.type = 'DynamicsCrmSource' # type: str self.query = kwargs.get('query', None) @@ -13612,31 +18942,31 @@ class DynamicsEntityDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param entity_name: The logical name of the entity. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar entity_name: The logical name of the entity. Type: string (or Expression with resultType string). - :type entity_name: any + :vartype entity_name: any """ _validation = { @@ -13661,6 +18991,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword entity_name: The logical name of the entity. Type: string (or Expression with + resultType string). 
+ :paramtype entity_name: any + """ super(DynamicsEntityDataset, self).__init__(**kwargs) self.type = 'DynamicsEntity' # type: str self.entity_name = kwargs.get('entity_name', None) @@ -13671,64 +19026,64 @@ class DynamicsLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar deployment_type: Required. The deployment type of the Dynamics instance. 
'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression with resultType string). - :type deployment_type: any - :param host_name: The host name of the on-premises Dynamics server. The property is required - for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :type host_name: any - :param port: The port of on-premises Dynamics server. The property is required for on-prem and + :vartype deployment_type: any + :ivar host_name: The host name of the on-premises Dynamics server. The property is required for + on-prem and not allowed for online. Type: string (or Expression with resultType string). + :vartype host_name: any + :ivar port: The port of on-premises Dynamics server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: any - :param service_uri: The URL to the Microsoft Dynamics server. The property is required for + :vartype port: any + :ivar service_uri: The URL to the Microsoft Dynamics server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: any - :param organization_name: The organization name of the Dynamics instance. The property is + :vartype service_uri: any + :ivar organization_name: The organization name of the Dynamics instance. The property is required for on-prem and required for online when there are more than one Dynamics instances associated with the user. Type: string (or Expression with resultType string). - :type organization_name: any - :param authentication_type: Required. The authentication type to connect to Dynamics server. + :vartype organization_name: any + :ivar authentication_type: Required. The authentication type to connect to Dynamics server. 
'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). - :type authentication_type: any - :param username: User name to access the Dynamics instance. Type: string (or Expression with + :vartype authentication_type: any + :ivar username: User name to access the Dynamics instance. Type: string (or Expression with resultType string). - :type username: any - :param password: Password to access the Dynamics instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_credential_type: The service principal credential type to use in + :vartype username: any + :ivar password: Password to access the Dynamics instance. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_principal_id: The client ID of the application in Azure Active Directory used for + Server-To-Server authentication. Type: string (or Expression with resultType string). + :vartype service_principal_id: any + :ivar service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). - :type service_principal_credential_type: str - :param service_principal_credential: The credential of the service principal object in Azure + :vartype service_principal_credential_type: str + :ivar service_principal_credential: The credential of the service principal object in Azure Active Directory. 
If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -13762,6 +19117,64 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for + Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or + Expression with resultType string). + :paramtype deployment_type: any + :keyword host_name: The host name of the on-premises Dynamics server. The property is required + for on-prem and not allowed for online. 
Type: string (or Expression with resultType string). + :paramtype host_name: any + :keyword port: The port of on-premises Dynamics server. The property is required for on-prem + and not allowed for online. Default is 443. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype port: any + :keyword service_uri: The URL to the Microsoft Dynamics server. The property is required for + on-line and not allowed for on-prem. Type: string (or Expression with resultType string). + :paramtype service_uri: any + :keyword organization_name: The organization name of the Dynamics instance. The property is + required for on-prem and required for online when there are more than one Dynamics instances + associated with the user. Type: string (or Expression with resultType string). + :paramtype organization_name: any + :keyword authentication_type: Required. The authentication type to connect to Dynamics server. + 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' + for Server-To-Server authentication in online scenario. Type: string (or Expression with + resultType string). + :paramtype authentication_type: any + :keyword username: User name to access the Dynamics instance. Type: string (or Expression with + resultType string). + :paramtype username: any + :keyword password: Password to access the Dynamics instance. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). 
+ :paramtype service_principal_credential_type: str + :keyword service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :paramtype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(DynamicsLinkedService, self).__init__(**kwargs) self.type = 'Dynamics' # type: str self.deployment_type = kwargs['deployment_type'] @@ -13783,39 +19196,39 @@ class DynamicsSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: Required. The write behavior for the operation. Possible values include: + :vartype disable_metrics_collection: any + :ivar write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". 
- :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior - :param ignore_null_values: The flag indicating whether ignore null values from input dataset + :vartype write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior + :ivar ignore_null_values: The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: any - :param alternate_key_name: The logical name of the alternate key which will be used when + :vartype ignore_null_values: any + :ivar alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). - :type alternate_key_name: any + :vartype alternate_key_name: any """ _validation = { @@ -13841,6 +19254,39 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: Required. The write behavior for the operation. Possible values + include: "Upsert". + :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior + :keyword ignore_null_values: The flag indicating whether ignore null values from input dataset + (except key fields) during write operation. Default is false. Type: boolean (or Expression with + resultType boolean). + :paramtype ignore_null_values: any + :keyword alternate_key_name: The logical name of the alternate key which will be used when + upserting records. Type: string (or Expression with resultType string). + :paramtype alternate_key_name: any + """ super(DynamicsSink, self).__init__(**kwargs) self.type = 'DynamicsSink' # type: str self.write_behavior = kwargs['write_behavior'] @@ -13853,29 +19299,29 @@ class DynamicsSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics + :vartype disable_metrics_collection: any + :ivar query: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). - :type query: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -13897,6 +19343,29 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: FetchXML is a proprietary query language that is used in Microsoft Dynamics + (online & on-premises). Type: string (or Expression with resultType string). + :paramtype query: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(DynamicsSource, self).__init__(**kwargs) self.type = 'DynamicsSource' # type: str self.query = kwargs.get('query', None) @@ -13908,40 +19377,40 @@ class EloquaLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). - :type endpoint: any - :param username: Required. The site name and user name of your Eloqua account in the form: + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). + :vartype endpoint: any + :ivar username: Required. The site name and user name of your Eloqua account in the form: sitename/username. (i.e. Eloqua/Alice). - :type username: any - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype username: any + :ivar password: The password corresponding to the user name. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
- :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -13970,6 +19439,40 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). 
+ :paramtype endpoint: any + :keyword username: Required. The site name and user name of your Eloqua account in the form: + sitename/username. (i.e. Eloqua/Alice). + :paramtype username: any + :keyword password: The password corresponding to the user name. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(EloquaLinkedService, self).__init__(**kwargs) self.type = 'Eloqua' # type: str self.endpoint = kwargs['endpoint'] @@ -13986,30 +19489,30 @@ class EloquaObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -14034,6 +19537,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(EloquaObjectDataset, self).__init__(**kwargs) self.type = 'EloquaObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -14044,32 +19571,32 @@ class EloquaSource(TabularSource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -14092,6 +19619,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(EloquaSource, self).__init__(**kwargs) self.type = 'EloquaSource' # type: str self.query = kwargs.get('query', None) @@ -14102,17 +19655,17 @@ class EncryptionConfiguration(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param key_name: Required. The name of the key in Azure Key Vault to use as Customer Managed + :ivar key_name: Required. The name of the key in Azure Key Vault to use as Customer Managed Key. - :type key_name: str - :param vault_base_url: Required. The url of the Azure Key Vault used for CMK. - :type vault_base_url: str - :param key_version: The version of the key used for CMK. If not provided, latest version will - be used. - :type key_version: str - :param identity: User assigned identity to use to authenticate to customer's key vault. If not + :vartype key_name: str + :ivar vault_base_url: Required. The url of the Azure Key Vault used for CMK. + :vartype vault_base_url: str + :ivar key_version: The version of the key used for CMK. If not provided, latest version will be + used. + :vartype key_version: str + :ivar identity: User assigned identity to use to authenticate to customer's key vault. If not provided Managed Service Identity will be used. - :type identity: ~azure.mgmt.datafactory.models.CMKIdentityDefinition + :vartype identity: ~azure.mgmt.datafactory.models.CMKIdentityDefinition """ _validation = { @@ -14131,6 +19684,19 @@ def __init__( self, **kwargs ): + """ + :keyword key_name: Required. The name of the key in Azure Key Vault to use as Customer Managed + Key. + :paramtype key_name: str + :keyword vault_base_url: Required. The url of the Azure Key Vault used for CMK. 
+ :paramtype vault_base_url: str + :keyword key_version: The version of the key used for CMK. If not provided, latest version will + be used. + :paramtype key_version: str + :keyword identity: User assigned identity to use to authenticate to customer's key vault. If + not provided Managed Service Identity will be used. + :paramtype identity: ~azure.mgmt.datafactory.models.CMKIdentityDefinition + """ super(EncryptionConfiguration, self).__init__(**kwargs) self.key_name = kwargs['key_name'] self.vault_base_url = kwargs['vault_base_url'] @@ -14141,11 +19707,11 @@ def __init__( class EntityReference(msrest.serialization.Model): """The entity reference. - :param type: The type of this referenced entity. Possible values include: + :ivar type: The type of this referenced entity. Possible values include: "IntegrationRuntimeReference", "LinkedServiceReference". - :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType - :param reference_name: The name of this referenced entity. - :type reference_name: str + :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :ivar reference_name: The name of this referenced entity. + :vartype reference_name: str """ _attribute_map = { @@ -14157,6 +19723,13 @@ def __init__( self, **kwargs ): + """ + :keyword type: The type of this referenced entity. Possible values include: + "IntegrationRuntimeReference", "LinkedServiceReference". + :paramtype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :keyword reference_name: The name of this referenced entity. + :paramtype reference_name: str + """ super(EntityReference, self).__init__(**kwargs) self.type = kwargs.get('type', None) self.reference_name = kwargs.get('reference_name', None) @@ -14167,12 +19740,12 @@ class EnvironmentVariableSetup(CustomSetupBase): All required parameters must be populated in order to send to Azure. - :param type: Required. 
The type of custom setup.Constant filled by server. - :type type: str - :param variable_name: Required. The name of the environment variable. - :type variable_name: str - :param variable_value: Required. The value of the environment variable. - :type variable_value: str + :ivar type: Required. The type of custom setup.Constant filled by server. + :vartype type: str + :ivar variable_name: Required. The name of the environment variable. + :vartype variable_name: str + :ivar variable_value: Required. The value of the environment variable. + :vartype variable_value: str """ _validation = { @@ -14191,6 +19764,12 @@ def __init__( self, **kwargs ): + """ + :keyword variable_name: Required. The name of the environment variable. + :paramtype variable_name: str + :keyword variable_value: Required. The value of the environment variable. + :paramtype variable_value: str + """ super(EnvironmentVariableSetup, self).__init__(**kwargs) self.type = 'EnvironmentVariableSetup' # type: str self.variable_name = kwargs['variable_name'] @@ -14202,47 +19781,47 @@ class ExcelDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the excel storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param sheet_name: The sheet name of excel file. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar location: The location of the excel storage. + :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation + :ivar sheet_name: The sheet name of excel file. Type: string (or Expression with resultType string). 
- :type sheet_name: any - :param sheet_index: The sheet index of excel file and default value is 0. Type: integer (or + :vartype sheet_name: any + :ivar sheet_index: The sheet index of excel file and default value is 0. Type: integer (or Expression with resultType integer). - :type sheet_index: any - :param range: The partial data of one sheet. Type: string (or Expression with resultType + :vartype sheet_index: any + :ivar range: The partial data of one sheet. Type: string (or Expression with resultType string). - :type range: any - :param first_row_as_header: When used as input, treat the first row of data as headers. When + :vartype range: any + :ivar first_row_as_header: When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). - :type first_row_as_header: any - :param compression: The data compression method used for the json dataset. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - :param null_value: The null value string. Type: string (or Expression with resultType string). - :type null_value: any + :vartype first_row_as_header: any + :ivar compression: The data compression method used for the json dataset. + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression + :ivar null_value: The null value string. Type: string (or Expression with resultType string). + :vartype null_value: any """ _validation = { @@ -14273,6 +19852,48 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. 
+ :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword location: The location of the excel storage. + :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation + :keyword sheet_name: The sheet name of excel file. Type: string (or Expression with resultType + string). + :paramtype sheet_name: any + :keyword sheet_index: The sheet index of excel file and default value is 0. Type: integer (or + Expression with resultType integer). + :paramtype sheet_index: any + :keyword range: The partial data of one sheet. Type: string (or Expression with resultType + string). + :paramtype range: any + :keyword first_row_as_header: When used as input, treat the first row of data as headers. When + used as output,write the headers into the output as the first row of data. The default value is + false. Type: boolean (or Expression with resultType boolean). + :paramtype first_row_as_header: any + :keyword compression: The data compression method used for the json dataset. + :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + :keyword null_value: The null value string. Type: string (or Expression with resultType + string). 
+ :paramtype null_value: any + """ super(ExcelDataset, self).__init__(**kwargs) self.type = 'Excel' # type: str self.location = kwargs.get('location', None) @@ -14289,28 +19910,28 @@ class ExcelSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any - :param store_settings: Excel store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar store_settings: Excel store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -14332,6 +19953,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: Excel store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :paramtype additional_columns: any + """ super(ExcelSource, self).__init__(**kwargs) self.type = 'ExcelSource' # type: str self.store_settings = kwargs.get('store_settings', None) @@ -14343,41 +19986,41 @@ class ExecuteDataFlowActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param data_flow: Required. Data flow reference. - :type data_flow: ~azure.mgmt.datafactory.models.DataFlowReference - :param staging: Staging info for execute data flow activity. - :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo - :param integration_runtime: The integration runtime reference. - :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param compute: Compute properties for data flow activity. - :type compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute - :param trace_level: Trace level setting used for data flow monitoring output. Supported values + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar data_flow: Required. Data flow reference. + :vartype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference + :ivar staging: Staging info for execute data flow activity. + :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :ivar integration_runtime: The integration runtime reference. + :vartype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar compute: Compute properties for data flow activity. + :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute + :ivar trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). - :type trace_level: any - :param continue_on_error: Continue on error setting used for data flow execution. Enables + :vartype trace_level: any + :ivar continue_on_error: Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). - :type continue_on_error: any - :param run_concurrently: Concurrent run setting used for data flow execution. 
Allows sinks with + :vartype continue_on_error: any + :ivar run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). - :type run_concurrently: any + :vartype run_concurrently: any """ _validation = { @@ -14408,6 +20051,41 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword data_flow: Required. Data flow reference. + :paramtype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference + :keyword staging: Staging info for execute data flow activity. + :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :keyword integration_runtime: The integration runtime reference. + :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword compute: Compute properties for data flow activity. + :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute + :keyword trace_level: Trace level setting used for data flow monitoring output. Supported + values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). 
+ :paramtype trace_level: any + :keyword continue_on_error: Continue on error setting used for data flow execution. Enables + processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). + :paramtype continue_on_error: any + :keyword run_concurrently: Concurrent run setting used for data flow execution. Allows sinks + with the same save order to be processed concurrently. Type: boolean (or Expression with + resultType boolean). + :paramtype run_concurrently: any + """ super(ExecuteDataFlowActivity, self).__init__(**kwargs) self.type = 'ExecuteDataFlow' # type: str self.data_flow = kwargs['data_flow'] @@ -14424,24 +20102,24 @@ class ExecuteDataFlowActivityTypeProperties(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param data_flow: Required. Data flow reference. - :type data_flow: ~azure.mgmt.datafactory.models.DataFlowReference - :param staging: Staging info for execute data flow activity. - :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo - :param integration_runtime: The integration runtime reference. - :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param compute: Compute properties for data flow activity. - :type compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute - :param trace_level: Trace level setting used for data flow monitoring output. Supported values + :ivar data_flow: Required. Data flow reference. + :vartype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference + :ivar staging: Staging info for execute data flow activity. + :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :ivar integration_runtime: The integration runtime reference. + :vartype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar compute: Compute properties for data flow activity. 
+ :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute + :ivar trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). - :type trace_level: any - :param continue_on_error: Continue on error setting used for data flow execution. Enables + :vartype trace_level: any + :ivar continue_on_error: Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). - :type continue_on_error: any - :param run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with + :vartype continue_on_error: any + :ivar run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). - :type run_concurrently: any + :vartype run_concurrently: any """ _validation = { @@ -14462,6 +20140,26 @@ def __init__( self, **kwargs ): + """ + :keyword data_flow: Required. Data flow reference. + :paramtype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference + :keyword staging: Staging info for execute data flow activity. + :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :keyword integration_runtime: The integration runtime reference. + :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword compute: Compute properties for data flow activity. + :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute + :keyword trace_level: Trace level setting used for data flow monitoring output. Supported + values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). 
+ :paramtype trace_level: any + :keyword continue_on_error: Continue on error setting used for data flow execution. Enables + processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). + :paramtype continue_on_error: any + :keyword run_concurrently: Concurrent run setting used for data flow execution. Allows sinks + with the same save order to be processed concurrently. Type: boolean (or Expression with + resultType boolean). + :paramtype run_concurrently: any + """ super(ExecuteDataFlowActivityTypeProperties, self).__init__(**kwargs) self.data_flow = kwargs['data_flow'] self.staging = kwargs.get('staging', None) @@ -14475,13 +20173,13 @@ def __init__( class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): """Compute properties for data flow activity. - :param compute_type: Compute type of the cluster which will execute data flow job. Possible + :ivar compute_type: Compute type of the cluster which will execute data flow job. Possible values include: 'General', 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression with resultType string). - :type compute_type: any - :param core_count: Core count of the cluster which will execute data flow job. Supported values + :vartype compute_type: any + :ivar core_count: Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. Type: integer (or Expression with resultType integer). - :type core_count: any + :vartype core_count: any """ _attribute_map = { @@ -14493,6 +20191,16 @@ def __init__( self, **kwargs ): + """ + :keyword compute_type: Compute type of the cluster which will execute data flow job. Possible + values include: 'General', 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression + with resultType string). + :paramtype compute_type: any + :keyword core_count: Core count of the cluster which will execute data flow job. Supported + values are: 8, 16, 32, 48, 80, 144 and 272. 
Type: integer (or Expression with resultType + integer). + :paramtype core_count: any + """ super(ExecuteDataFlowActivityTypePropertiesCompute, self).__init__(**kwargs) self.compute_type = kwargs.get('compute_type', None) self.core_count = kwargs.get('core_count', None) @@ -14503,26 +20211,26 @@ class ExecutePipelineActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param pipeline: Required. Pipeline reference. - :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference - :param parameters: Pipeline parameters. - :type parameters: dict[str, any] - :param wait_on_completion: Defines whether activity execution will wait for the dependent + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar pipeline: Required. 
Pipeline reference. + :vartype pipeline: ~azure.mgmt.datafactory.models.PipelineReference + :ivar parameters: Pipeline parameters. + :vartype parameters: dict[str, any] + :ivar wait_on_completion: Defines whether activity execution will wait for the dependent pipeline execution to finish. Default is false. - :type wait_on_completion: bool + :vartype wait_on_completion: bool """ _validation = { @@ -14547,6 +20255,26 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword pipeline: Required. Pipeline reference. + :paramtype pipeline: ~azure.mgmt.datafactory.models.PipelineReference + :keyword parameters: Pipeline parameters. + :paramtype parameters: dict[str, any] + :keyword wait_on_completion: Defines whether activity execution will wait for the dependent + pipeline execution to finish. Default is false. + :paramtype wait_on_completion: bool + """ super(ExecutePipelineActivity, self).__init__(**kwargs) self.type = 'ExecutePipeline' # type: str self.pipeline = kwargs['pipeline'] @@ -14559,29 +20287,29 @@ class ExecutePowerQueryActivityTypeProperties(ExecuteDataFlowActivityTypePropert All required parameters must be populated in order to send to Azure. - :param data_flow: Required. Data flow reference. - :type data_flow: ~azure.mgmt.datafactory.models.DataFlowReference - :param staging: Staging info for execute data flow activity. 
- :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo - :param integration_runtime: The integration runtime reference. - :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param compute: Compute properties for data flow activity. - :type compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute - :param trace_level: Trace level setting used for data flow monitoring output. Supported values + :ivar data_flow: Required. Data flow reference. + :vartype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference + :ivar staging: Staging info for execute data flow activity. + :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :ivar integration_runtime: The integration runtime reference. + :vartype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar compute: Compute properties for data flow activity. + :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute + :ivar trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). - :type trace_level: any - :param continue_on_error: Continue on error setting used for data flow execution. Enables + :vartype trace_level: any + :ivar continue_on_error: Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). - :type continue_on_error: any - :param run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with + :vartype continue_on_error: any + :ivar run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). - :type run_concurrently: any - :param sinks: (Deprecated. Please use Queries). 
List of Power Query activity sinks mapped to a + :vartype run_concurrently: any + :ivar sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to a queryName. - :type sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] - :param queries: List of mapping for Power Query mashup query to sink dataset(s). - :type queries: list[~azure.mgmt.datafactory.models.PowerQuerySinkMapping] + :vartype sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] + :ivar queries: List of mapping for Power Query mashup query to sink dataset(s). + :vartype queries: list[~azure.mgmt.datafactory.models.PowerQuerySinkMapping] """ _validation = { @@ -14604,6 +20332,31 @@ def __init__( self, **kwargs ): + """ + :keyword data_flow: Required. Data flow reference. + :paramtype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference + :keyword staging: Staging info for execute data flow activity. + :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :keyword integration_runtime: The integration runtime reference. + :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword compute: Compute properties for data flow activity. + :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute + :keyword trace_level: Trace level setting used for data flow monitoring output. Supported + values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). + :paramtype trace_level: any + :keyword continue_on_error: Continue on error setting used for data flow execution. Enables + processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). + :paramtype continue_on_error: any + :keyword run_concurrently: Concurrent run setting used for data flow execution. Allows sinks + with the same save order to be processed concurrently. Type: boolean (or Expression with + resultType boolean). 
+ :paramtype run_concurrently: any + :keyword sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to + a queryName. + :paramtype sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] + :keyword queries: List of mapping for Power Query mashup query to sink dataset(s). + :paramtype queries: list[~azure.mgmt.datafactory.models.PowerQuerySinkMapping] + """ super(ExecutePowerQueryActivityTypeProperties, self).__init__(**kwargs) self.sinks = kwargs.get('sinks', None) self.queries = kwargs.get('queries', None) @@ -14614,54 +20367,54 @@ class ExecuteSSISPackageActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param package_location: Required. SSIS package location. - :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation - :param runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. 
+ :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar package_location: Required. SSIS package location. + :vartype package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation + :ivar runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or "x64". Type: string (or Expression with resultType string). - :type runtime: any - :param logging_level: The logging level of SSIS package execution. Type: string (or Expression + :vartype runtime: any + :ivar logging_level: The logging level of SSIS package execution. Type: string (or Expression with resultType string). - :type logging_level: any - :param environment_path: The environment path to execute the SSIS package. Type: string (or + :vartype logging_level: any + :ivar environment_path: The environment path to execute the SSIS package. Type: string (or Expression with resultType string). - :type environment_path: any - :param execution_credential: The package execution credential. - :type execution_credential: ~azure.mgmt.datafactory.models.SSISExecutionCredential - :param connect_via: Required. The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param project_parameters: The project level parameters to execute the SSIS package. 
- :type project_parameters: dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter] - :param package_parameters: The package level parameters to execute the SSIS package. - :type package_parameters: dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter] - :param project_connection_managers: The project level connection managers to execute the SSIS + :vartype environment_path: any + :ivar execution_credential: The package execution credential. + :vartype execution_credential: ~azure.mgmt.datafactory.models.SSISExecutionCredential + :ivar connect_via: Required. The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar project_parameters: The project level parameters to execute the SSIS package. + :vartype project_parameters: dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter] + :ivar package_parameters: The package level parameters to execute the SSIS package. + :vartype package_parameters: dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter] + :ivar project_connection_managers: The project level connection managers to execute the SSIS package. - :type project_connection_managers: dict[str, dict[str, + :vartype project_connection_managers: dict[str, dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter]] - :param package_connection_managers: The package level connection managers to execute the SSIS + :ivar package_connection_managers: The package level connection managers to execute the SSIS package. - :type package_connection_managers: dict[str, dict[str, + :vartype package_connection_managers: dict[str, dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter]] - :param property_overrides: The property overrides to execute the SSIS package. - :type property_overrides: dict[str, ~azure.mgmt.datafactory.models.SSISPropertyOverride] - :param log_location: SSIS package execution log location. 
- :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation + :ivar property_overrides: The property overrides to execute the SSIS package. + :vartype property_overrides: dict[str, ~azure.mgmt.datafactory.models.SSISPropertyOverride] + :ivar log_location: SSIS package execution log location. + :vartype log_location: ~azure.mgmt.datafactory.models.SSISLogLocation """ _validation = { @@ -14698,6 +20451,54 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword package_location: Required. SSIS package location. + :paramtype package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation + :keyword runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or + "x64". Type: string (or Expression with resultType string). + :paramtype runtime: any + :keyword logging_level: The logging level of SSIS package execution. Type: string (or + Expression with resultType string). + :paramtype logging_level: any + :keyword environment_path: The environment path to execute the SSIS package. Type: string (or + Expression with resultType string). + :paramtype environment_path: any + :keyword execution_credential: The package execution credential. 
+ :paramtype execution_credential: ~azure.mgmt.datafactory.models.SSISExecutionCredential + :keyword connect_via: Required. The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword project_parameters: The project level parameters to execute the SSIS package. + :paramtype project_parameters: dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter] + :keyword package_parameters: The package level parameters to execute the SSIS package. + :paramtype package_parameters: dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter] + :keyword project_connection_managers: The project level connection managers to execute the SSIS + package. + :paramtype project_connection_managers: dict[str, dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter]] + :keyword package_connection_managers: The package level connection managers to execute the SSIS + package. + :paramtype package_connection_managers: dict[str, dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter]] + :keyword property_overrides: The property overrides to execute the SSIS package. + :paramtype property_overrides: dict[str, ~azure.mgmt.datafactory.models.SSISPropertyOverride] + :keyword log_location: SSIS package execution log location. + :paramtype log_location: ~azure.mgmt.datafactory.models.SSISLogLocation + """ super(ExecuteSSISPackageActivity, self).__init__(**kwargs) self.type = 'ExecuteSSISPackage' # type: str self.package_location = kwargs['package_location'] @@ -14719,44 +20520,44 @@ class ExecuteWranglingDataflowActivity(Activity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. 
- :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param data_flow: Required. Data flow reference. - :type data_flow: ~azure.mgmt.datafactory.models.DataFlowReference - :param staging: Staging info for execute data flow activity. - :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo - :param integration_runtime: The integration runtime reference. - :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param compute: Compute properties for data flow activity. - :type compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute - :param trace_level: Trace level setting used for data flow monitoring output. Supported values + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar data_flow: Required. Data flow reference. 
+ :vartype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference + :ivar staging: Staging info for execute data flow activity. + :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :ivar integration_runtime: The integration runtime reference. + :vartype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar compute: Compute properties for data flow activity. + :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute + :ivar trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). - :type trace_level: any - :param continue_on_error: Continue on error setting used for data flow execution. Enables + :vartype trace_level: any + :ivar continue_on_error: Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). - :type continue_on_error: any - :param run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with + :vartype continue_on_error: any + :ivar run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). - :type run_concurrently: any - :param sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to a + :vartype run_concurrently: any + :ivar sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to a queryName. - :type sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] - :param queries: List of mapping for Power Query mashup query to sink dataset(s). 
- :type queries: list[~azure.mgmt.datafactory.models.PowerQuerySinkMapping] + :vartype sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] + :ivar queries: List of mapping for Power Query mashup query to sink dataset(s). + :vartype queries: list[~azure.mgmt.datafactory.models.PowerQuerySinkMapping] """ _validation = { @@ -14788,6 +20589,44 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword data_flow: Required. Data flow reference. + :paramtype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference + :keyword staging: Staging info for execute data flow activity. + :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :keyword integration_runtime: The integration runtime reference. + :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword compute: Compute properties for data flow activity. + :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute + :keyword trace_level: Trace level setting used for data flow monitoring output. Supported + values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). + :paramtype trace_level: any + :keyword continue_on_error: Continue on error setting used for data flow execution. 
Enables + processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). + :paramtype continue_on_error: any + :keyword run_concurrently: Concurrent run setting used for data flow execution. Allows sinks + with the same save order to be processed concurrently. Type: boolean (or Expression with + resultType boolean). + :paramtype run_concurrently: any + :keyword sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to + a queryName. + :paramtype sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] + :keyword queries: List of mapping for Power Query mashup query to sink dataset(s). + :paramtype queries: list[~azure.mgmt.datafactory.models.PowerQuerySinkMapping] + """ super(ExecuteWranglingDataflowActivity, self).__init__(**kwargs) self.type = 'ExecuteWranglingDataflow' # type: str self.policy = kwargs.get('policy', None) @@ -14807,8 +20646,8 @@ class ExposureControlBatchRequest(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param exposure_control_requests: Required. List of exposure control features. - :type exposure_control_requests: list[~azure.mgmt.datafactory.models.ExposureControlRequest] + :ivar exposure_control_requests: Required. List of exposure control features. + :vartype exposure_control_requests: list[~azure.mgmt.datafactory.models.ExposureControlRequest] """ _validation = { @@ -14823,6 +20662,11 @@ def __init__( self, **kwargs ): + """ + :keyword exposure_control_requests: Required. List of exposure control features. + :paramtype exposure_control_requests: + list[~azure.mgmt.datafactory.models.ExposureControlRequest] + """ super(ExposureControlBatchRequest, self).__init__(**kwargs) self.exposure_control_requests = kwargs['exposure_control_requests'] @@ -14832,8 +20676,9 @@ class ExposureControlBatchResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. 
- :param exposure_control_responses: Required. List of exposure control feature values. - :type exposure_control_responses: list[~azure.mgmt.datafactory.models.ExposureControlResponse] + :ivar exposure_control_responses: Required. List of exposure control feature values. + :vartype exposure_control_responses: + list[~azure.mgmt.datafactory.models.ExposureControlResponse] """ _validation = { @@ -14848,6 +20693,11 @@ def __init__( self, **kwargs ): + """ + :keyword exposure_control_responses: Required. List of exposure control feature values. + :paramtype exposure_control_responses: + list[~azure.mgmt.datafactory.models.ExposureControlResponse] + """ super(ExposureControlBatchResponse, self).__init__(**kwargs) self.exposure_control_responses = kwargs['exposure_control_responses'] @@ -14855,10 +20705,10 @@ def __init__( class ExposureControlRequest(msrest.serialization.Model): """The exposure control request. - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str + :ivar feature_name: The feature name. + :vartype feature_name: str + :ivar feature_type: The feature type. + :vartype feature_type: str """ _attribute_map = { @@ -14870,6 +20720,12 @@ def __init__( self, **kwargs ): + """ + :keyword feature_name: The feature name. + :paramtype feature_name: str + :keyword feature_type: The feature type. + :paramtype feature_type: str + """ super(ExposureControlRequest, self).__init__(**kwargs) self.feature_name = kwargs.get('feature_name', None) self.feature_type = kwargs.get('feature_type', None) @@ -14900,6 +20756,8 @@ def __init__( self, **kwargs ): + """ + """ super(ExposureControlResponse, self).__init__(**kwargs) self.feature_name = None self.value = None @@ -14914,8 +20772,8 @@ class Expression(msrest.serialization.Model): :ivar type: Expression type. Has constant value: "Expression". :vartype type: str - :param value: Required. Expression value. - :type value: str + :ivar value: Required. 
Expression value. + :vartype value: str """ _validation = { @@ -14934,6 +20792,10 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. Expression value. + :paramtype value: str + """ super(Expression, self).__init__(**kwargs) self.value = kwargs['value'] @@ -14949,10 +20811,10 @@ class Resource(msrest.serialization.Model): :vartype name: str :ivar type: The resource type. :vartype type: str - :param location: The resource location. - :type location: str - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] + :ivar location: The resource location. + :vartype location: str + :ivar tags: A set of tags. The resource tags. + :vartype tags: dict[str, str] :ivar e_tag: Etag identifies change in the resource. :vartype e_tag: str """ @@ -14977,6 +20839,12 @@ def __init__( self, **kwargs ): + """ + :keyword location: The resource location. + :paramtype location: str + :keyword tags: A set of tags. The resource tags. + :paramtype tags: dict[str, str] + """ super(Resource, self).__init__(**kwargs) self.id = None self.name = None @@ -14997,32 +20865,33 @@ class Factory(Resource): :vartype name: str :ivar type: The resource type. :vartype type: str - :param location: The resource location. - :type location: str - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] + :ivar location: The resource location. + :vartype location: str + :ivar tags: A set of tags. The resource tags. + :vartype tags: dict[str, str] :ivar e_tag: Etag identifies change in the resource. :vartype e_tag: str - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param identity: Managed service identity of the factory. 
- :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + :vartype additional_properties: dict[str, any] + :ivar identity: Managed service identity of the factory. + :vartype identity: ~azure.mgmt.datafactory.models.FactoryIdentity :ivar provisioning_state: Factory provisioning state, example Succeeded. :vartype provisioning_state: str :ivar create_time: Time the factory was created in ISO8601 format. :vartype create_time: ~datetime.datetime :ivar version: Version of the factory. :vartype version: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration - :param global_parameters: List of parameters for factory. - :type global_parameters: dict[str, ~azure.mgmt.datafactory.models.GlobalParameterSpecification] - :param encryption: Properties to enable Customer Managed Key for the factory. - :type encryption: ~azure.mgmt.datafactory.models.EncryptionConfiguration - :param public_network_access: Whether or not public network access is allowed for the data + :ivar repo_configuration: Git repo information of the factory. + :vartype repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + :ivar global_parameters: List of parameters for factory. + :vartype global_parameters: dict[str, + ~azure.mgmt.datafactory.models.GlobalParameterSpecification] + :ivar encryption: Properties to enable Customer Managed Key for the factory. + :vartype encryption: ~azure.mgmt.datafactory.models.EncryptionConfiguration + :ivar public_network_access: Whether or not public network access is allowed for the data factory. Possible values include: "Enabled", "Disabled". - :type public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess + :vartype public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess """ _validation = { @@ -15057,6 +20926,27 @@ def __init__( self, **kwargs ): + """ + :keyword location: The resource location. 
+ :paramtype location: str + :keyword tags: A set of tags. The resource tags. + :paramtype tags: dict[str, str] + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword identity: Managed service identity of the factory. + :paramtype identity: ~azure.mgmt.datafactory.models.FactoryIdentity + :keyword repo_configuration: Git repo information of the factory. + :paramtype repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + :keyword global_parameters: List of parameters for factory. + :paramtype global_parameters: dict[str, + ~azure.mgmt.datafactory.models.GlobalParameterSpecification] + :keyword encryption: Properties to enable Customer Managed Key for the factory. + :paramtype encryption: ~azure.mgmt.datafactory.models.EncryptionConfiguration + :keyword public_network_access: Whether or not public network access is allowed for the data + factory. Possible values include: "Enabled", "Disabled". + :paramtype public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess + """ super(Factory, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.identity = kwargs.get('identity', None) @@ -15077,18 +20967,18 @@ class FactoryRepoConfiguration(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of repo configuration.Constant filled by server. - :type type: str - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str + :ivar type: Required. 
Type of repo configuration.Constant filled by server. + :vartype type: str + :ivar account_name: Required. Account name. + :vartype account_name: str + :ivar repository_name: Required. Repository name. + :vartype repository_name: str + :ivar collaboration_branch: Required. Collaboration branch. + :vartype collaboration_branch: str + :ivar root_folder: Required. Root folder. + :vartype root_folder: str + :ivar last_commit_id: Last commit id. + :vartype last_commit_id: str """ _validation = { @@ -15116,6 +21006,18 @@ def __init__( self, **kwargs ): + """ + :keyword account_name: Required. Account name. + :paramtype account_name: str + :keyword repository_name: Required. Repository name. + :paramtype repository_name: str + :keyword collaboration_branch: Required. Collaboration branch. + :paramtype collaboration_branch: str + :keyword root_folder: Required. Root folder. + :paramtype root_folder: str + :keyword last_commit_id: Last commit id. + :paramtype last_commit_id: str + """ super(FactoryRepoConfiguration, self).__init__(**kwargs) self.type = None # type: Optional[str] self.account_name = kwargs['account_name'] @@ -15130,24 +21032,24 @@ class FactoryGitHubConfiguration(FactoryRepoConfiguration): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of repo configuration.Constant filled by server. - :type type: str - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param host_name: GitHub Enterprise host name. For example: https://github.mydomain.com. - :type host_name: str - :param client_id: GitHub bring your own app client id. 
- :type client_id: str - :param client_secret: GitHub bring your own app client secret information. - :type client_secret: ~azure.mgmt.datafactory.models.GitHubClientSecret + :ivar type: Required. Type of repo configuration.Constant filled by server. + :vartype type: str + :ivar account_name: Required. Account name. + :vartype account_name: str + :ivar repository_name: Required. Repository name. + :vartype repository_name: str + :ivar collaboration_branch: Required. Collaboration branch. + :vartype collaboration_branch: str + :ivar root_folder: Required. Root folder. + :vartype root_folder: str + :ivar last_commit_id: Last commit id. + :vartype last_commit_id: str + :ivar host_name: GitHub Enterprise host name. For example: https://github.mydomain.com. + :vartype host_name: str + :ivar client_id: GitHub bring your own app client id. + :vartype client_id: str + :ivar client_secret: GitHub bring your own app client secret information. + :vartype client_secret: ~azure.mgmt.datafactory.models.GitHubClientSecret """ _validation = { @@ -15174,6 +21076,24 @@ def __init__( self, **kwargs ): + """ + :keyword account_name: Required. Account name. + :paramtype account_name: str + :keyword repository_name: Required. Repository name. + :paramtype repository_name: str + :keyword collaboration_branch: Required. Collaboration branch. + :paramtype collaboration_branch: str + :keyword root_folder: Required. Root folder. + :paramtype root_folder: str + :keyword last_commit_id: Last commit id. + :paramtype last_commit_id: str + :keyword host_name: GitHub Enterprise host name. For example: https://github.mydomain.com. + :paramtype host_name: str + :keyword client_id: GitHub bring your own app client id. + :paramtype client_id: str + :keyword client_secret: GitHub bring your own app client secret information. 
+ :paramtype client_secret: ~azure.mgmt.datafactory.models.GitHubClientSecret + """ super(FactoryGitHubConfiguration, self).__init__(**kwargs) self.type = 'FactoryGitHubConfiguration' # type: str self.host_name = kwargs.get('host_name', None) @@ -15188,15 +21108,15 @@ class FactoryIdentity(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. The identity type. Possible values include: "SystemAssigned", + :ivar type: Required. The identity type. Possible values include: "SystemAssigned", "UserAssigned", "SystemAssigned,UserAssigned". - :type type: str or ~azure.mgmt.datafactory.models.FactoryIdentityType + :vartype type: str or ~azure.mgmt.datafactory.models.FactoryIdentityType :ivar principal_id: The principal id of the identity. :vartype principal_id: str :ivar tenant_id: The client tenant id of the identity. :vartype tenant_id: str - :param user_assigned_identities: List of user assigned identities for the factory. - :type user_assigned_identities: dict[str, any] + :ivar user_assigned_identities: List of user assigned identities for the factory. + :vartype user_assigned_identities: dict[str, any] """ _validation = { @@ -15216,6 +21136,13 @@ def __init__( self, **kwargs ): + """ + :keyword type: Required. The identity type. Possible values include: "SystemAssigned", + "UserAssigned", "SystemAssigned,UserAssigned". + :paramtype type: str or ~azure.mgmt.datafactory.models.FactoryIdentityType + :keyword user_assigned_identities: List of user assigned identities for the factory. + :paramtype user_assigned_identities: dict[str, any] + """ super(FactoryIdentity, self).__init__(**kwargs) self.type = kwargs['type'] self.principal_id = None @@ -15228,10 +21155,10 @@ class FactoryListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of factories. 
- :type value: list[~azure.mgmt.datafactory.models.Factory] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of factories. + :vartype value: list[~azure.mgmt.datafactory.models.Factory] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -15247,6 +21174,12 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. List of factories. + :paramtype value: list[~azure.mgmt.datafactory.models.Factory] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(FactoryListResponse, self).__init__(**kwargs) self.value = kwargs['value'] self.next_link = kwargs.get('next_link', None) @@ -15255,10 +21188,10 @@ def __init__( class FactoryRepoUpdate(msrest.serialization.Model): """Factory's git repo information. - :param factory_resource_id: The factory resource id. - :type factory_resource_id: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + :ivar factory_resource_id: The factory resource id. + :vartype factory_resource_id: str + :ivar repo_configuration: Git repo information of the factory. + :vartype repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration """ _attribute_map = { @@ -15270,6 +21203,12 @@ def __init__( self, **kwargs ): + """ + :keyword factory_resource_id: The factory resource id. + :paramtype factory_resource_id: str + :keyword repo_configuration: Git repo information of the factory. 
+ :paramtype repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + """ super(FactoryRepoUpdate, self).__init__(**kwargs) self.factory_resource_id = kwargs.get('factory_resource_id', None) self.repo_configuration = kwargs.get('repo_configuration', None) @@ -15278,13 +21217,13 @@ def __init__( class FactoryUpdateParameters(msrest.serialization.Model): """Parameters for updating a factory resource. - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity - :param public_network_access: Whether or not public network access is allowed for the data + :ivar tags: A set of tags. The resource tags. + :vartype tags: dict[str, str] + :ivar identity: Managed service identity of the factory. + :vartype identity: ~azure.mgmt.datafactory.models.FactoryIdentity + :ivar public_network_access: Whether or not public network access is allowed for the data factory. Possible values include: "Enabled", "Disabled". - :type public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess + :vartype public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess """ _attribute_map = { @@ -15297,6 +21236,15 @@ def __init__( self, **kwargs ): + """ + :keyword tags: A set of tags. The resource tags. + :paramtype tags: dict[str, str] + :keyword identity: Managed service identity of the factory. + :paramtype identity: ~azure.mgmt.datafactory.models.FactoryIdentity + :keyword public_network_access: Whether or not public network access is allowed for the data + factory. Possible values include: "Enabled", "Disabled". 
+ :paramtype public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess + """ super(FactoryUpdateParameters, self).__init__(**kwargs) self.tags = kwargs.get('tags', None) self.identity = kwargs.get('identity', None) @@ -15308,22 +21256,22 @@ class FactoryVSTSConfiguration(FactoryRepoConfiguration): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of repo configuration.Constant filled by server. - :type type: str - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param project_name: Required. VSTS project name. - :type project_name: str - :param tenant_id: VSTS tenant id. - :type tenant_id: str + :ivar type: Required. Type of repo configuration.Constant filled by server. + :vartype type: str + :ivar account_name: Required. Account name. + :vartype account_name: str + :ivar repository_name: Required. Repository name. + :vartype repository_name: str + :ivar collaboration_branch: Required. Collaboration branch. + :vartype collaboration_branch: str + :ivar root_folder: Required. Root folder. + :vartype root_folder: str + :ivar last_commit_id: Last commit id. + :vartype last_commit_id: str + :ivar project_name: Required. VSTS project name. + :vartype project_name: str + :ivar tenant_id: VSTS tenant id. + :vartype tenant_id: str """ _validation = { @@ -15350,6 +21298,22 @@ def __init__( self, **kwargs ): + """ + :keyword account_name: Required. Account name. + :paramtype account_name: str + :keyword repository_name: Required. Repository name. + :paramtype repository_name: str + :keyword collaboration_branch: Required. Collaboration branch. 
+ :paramtype collaboration_branch: str + :keyword root_folder: Required. Root folder. + :paramtype root_folder: str + :keyword last_commit_id: Last commit id. + :paramtype last_commit_id: str + :keyword project_name: Required. VSTS project name. + :paramtype project_name: str + :keyword tenant_id: VSTS tenant id. + :paramtype tenant_id: str + """ super(FactoryVSTSConfiguration, self).__init__(**kwargs) self.type = 'FactoryVSTSConfiguration' # type: str self.project_name = kwargs['project_name'] @@ -15361,31 +21325,31 @@ class FileServerLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. Host name of the server. Type: string (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. Host name of the server. Type: string (or Expression with resultType string). - :type host: any - :param user_id: User ID to logon the server. Type: string (or Expression with resultType + :vartype host: any + :ivar user_id: User ID to logon the server. Type: string (or Expression with resultType string). - :type user_id: any - :param password: Password to logon the server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype user_id: any + :ivar password: Password to logon the server. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -15410,6 +21374,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. Host name of the server. 
Type: string (or Expression with resultType + string). + :paramtype host: any + :keyword user_id: User ID to logon the server. Type: string (or Expression with resultType + string). + :paramtype user_id: any + :keyword password: Password to logon the server. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(FileServerLinkedService, self).__init__(**kwargs) self.type = 'FileServer' # type: str self.host = kwargs['host'] @@ -15423,17 +21412,17 @@ class FileServerLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). 
- :type file_name: any + :vartype file_name: any """ _validation = { @@ -15451,6 +21440,17 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + """ super(FileServerLocation, self).__init__(**kwargs) self.type = 'FileServerLocation' # type: str @@ -15460,47 +21460,47 @@ class FileServerReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string (or Expression with + :vartype recursive: any + :ivar wildcard_folder_path: FileServer wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype wildcard_file_name: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype file_list_path: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
- :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any - :param file_filter: Specify a filter to be used to select a subset of files in the folderPath + :vartype modified_datetime_end: any + :ivar file_filter: Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). - :type file_filter: any + :vartype file_filter: any """ _validation = { @@ -15528,6 +21528,47 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: FileServer wildcardFolderPath. Type: string (or Expression with + resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with + resultType string). + :paramtype wildcard_file_name: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + :keyword file_filter: Specify a filter to be used to select a subset of files in the folderPath + rather than all files. Type: string (or Expression with resultType string). 
+ :paramtype file_filter: any + """ super(FileServerReadSettings, self).__init__(**kwargs) self.type = 'FileServerReadSettings' # type: str self.recursive = kwargs.get('recursive', None) @@ -15547,19 +21588,19 @@ class FileServerWriteSettings(StoreWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any """ _validation = { @@ -15578,6 +21619,19 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. + :paramtype copy_behavior: any + """ super(FileServerWriteSettings, self).__init__(**kwargs) self.type = 'FileServerWriteSettings' # type: str @@ -15587,47 +21641,47 @@ class FileShareDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param folder_path: The path of the on-premises file system. Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar folder_path: The path of the on-premises file system. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: The name of the on-premises file system. Type: string (or Expression with + :vartype folder_path: any + :ivar file_name: The name of the on-premises file system. Type: string (or Expression with resultType string). - :type file_name: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype file_name: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
- :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any - :param format: The format of the files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param file_filter: Specify a filter to be used to select a subset of files in the folderPath + :vartype modified_datetime_end: any + :ivar format: The format of the files. + :vartype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :ivar file_filter: Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). - :type file_filter: any - :param compression: The data compression method used for the file system. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + :vartype file_filter: any + :ivar compression: The data compression method used for the file system. + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -15658,6 +21712,47 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword folder_path: The path of the on-premises file system. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: The name of the on-premises file system. Type: string (or Expression with + resultType string). + :paramtype file_name: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + :keyword format: The format of the files. + :paramtype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :keyword file_filter: Specify a filter to be used to select a subset of files in the folderPath + rather than all files. Type: string (or Expression with resultType string). + :paramtype file_filter: any + :keyword compression: The data compression method used for the file system. + :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ super(FileShareDataset, self).__init__(**kwargs) self.type = 'FileShare' # type: str self.folder_path = kwargs.get('folder_path', None) @@ -15674,31 +21769,31 @@ class FileSystemSink(CopySink): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any """ _validation = { @@ -15721,6 +21816,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. + :paramtype copy_behavior: any + """ super(FileSystemSink, self).__init__(**kwargs) self.type = 'FileSystemSink' # type: str self.copy_behavior = kwargs.get('copy_behavior', None) @@ -15731,29 +21851,29 @@ class FileSystemSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. 
Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype recursive: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -15775,6 +21895,29 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). 
+ :paramtype recursive: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(FileSystemSource, self).__init__(**kwargs) self.type = 'FileSystemSource' # type: str self.recursive = kwargs.get('recursive', None) @@ -15786,23 +21929,23 @@ class FilterActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param items: Required. Input array on which filter should be applied. - :type items: ~azure.mgmt.datafactory.models.Expression - :param condition: Required. Condition to be used for filtering the input. - :type condition: ~azure.mgmt.datafactory.models.Expression + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. 
+ :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar items: Required. Input array on which filter should be applied. + :vartype items: ~azure.mgmt.datafactory.models.Expression + :ivar condition: Required. Condition to be used for filtering the input. + :vartype condition: ~azure.mgmt.datafactory.models.Expression """ _validation = { @@ -15827,6 +21970,23 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword items: Required. Input array on which filter should be applied. + :paramtype items: ~azure.mgmt.datafactory.models.Expression + :keyword condition: Required. Condition to be used for filtering the input. + :paramtype condition: ~azure.mgmt.datafactory.models.Expression + """ super(FilterActivity, self).__init__(**kwargs) self.type = 'Filter' # type: str self.items = kwargs['items'] @@ -15838,25 +21998,25 @@ class Flowlet(DataFlow): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of data flow.Constant filled by server. - :type type: str - :param description: The description of the data flow. - :type description: str - :param annotations: List of tags that can be used for describing the data flow. - :type annotations: list[any] - :param folder: The folder that this data flow is in. If not specified, Data flow will appear at + :ivar type: Required. Type of data flow.Constant filled by server. 
+ :vartype type: str + :ivar description: The description of the data flow. + :vartype description: str + :ivar annotations: List of tags that can be used for describing the data flow. + :vartype annotations: list[any] + :ivar folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder - :param sources: List of sources in Flowlet. - :type sources: list[~azure.mgmt.datafactory.models.DataFlowSource] - :param sinks: List of sinks in Flowlet. - :type sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] - :param transformations: List of transformations in Flowlet. - :type transformations: list[~azure.mgmt.datafactory.models.Transformation] - :param script: Flowlet script. - :type script: str - :param script_lines: Flowlet script lines. - :type script_lines: list[str] + :vartype folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :ivar sources: List of sources in Flowlet. + :vartype sources: list[~azure.mgmt.datafactory.models.DataFlowSource] + :ivar sinks: List of sinks in Flowlet. + :vartype sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] + :ivar transformations: List of transformations in Flowlet. + :vartype transformations: list[~azure.mgmt.datafactory.models.Transformation] + :ivar script: Flowlet script. + :vartype script: str + :ivar script_lines: Flowlet script lines. + :vartype script_lines: list[str] """ _validation = { @@ -15879,6 +22039,25 @@ def __init__( self, **kwargs ): + """ + :keyword description: The description of the data flow. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the data flow. + :paramtype annotations: list[any] + :keyword folder: The folder that this data flow is in. If not specified, Data flow will appear + at the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :keyword sources: List of sources in Flowlet. 
+ :paramtype sources: list[~azure.mgmt.datafactory.models.DataFlowSource] + :keyword sinks: List of sinks in Flowlet. + :paramtype sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] + :keyword transformations: List of transformations in Flowlet. + :paramtype transformations: list[~azure.mgmt.datafactory.models.Transformation] + :keyword script: Flowlet script. + :paramtype script: str + :keyword script_lines: Flowlet script lines. + :paramtype script_lines: list[str] + """ super(Flowlet, self).__init__(**kwargs) self.type = 'Flowlet' # type: str self.sources = kwargs.get('sources', None) @@ -15893,28 +22072,28 @@ class ForEachActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param is_sequential: Should the loop be executed in sequence or in parallel (max 50). - :type is_sequential: bool - :param batch_count: Batch count to be used for controlling the number of parallel execution + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. 
+ :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar is_sequential: Should the loop be executed in sequence or in parallel (max 50). + :vartype is_sequential: bool + :ivar batch_count: Batch count to be used for controlling the number of parallel execution (when isSequential is set to false). - :type batch_count: int - :param items: Required. Collection to iterate. - :type items: ~azure.mgmt.datafactory.models.Expression - :param activities: Required. List of activities to execute . - :type activities: list[~azure.mgmt.datafactory.models.Activity] + :vartype batch_count: int + :ivar items: Required. Collection to iterate. + :vartype items: ~azure.mgmt.datafactory.models.Expression + :ivar activities: Required. List of activities to execute . + :vartype activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { @@ -15942,6 +22121,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword is_sequential: Should the loop be executed in sequence or in parallel (max 50). + :paramtype is_sequential: bool + :keyword batch_count: Batch count to be used for controlling the number of parallel execution + (when isSequential is set to false). + :paramtype batch_count: int + :keyword items: Required. Collection to iterate. 
+ :paramtype items: ~azure.mgmt.datafactory.models.Expression + :keyword activities: Required. List of activities to execute . + :paramtype activities: list[~azure.mgmt.datafactory.models.Activity] + """ super(ForEachActivity, self).__init__(**kwargs) self.type = 'ForEach' # type: str self.is_sequential = kwargs.get('is_sequential', None) @@ -15955,43 +22156,43 @@ class FtpReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: Ftp wildcardFolderPath. 
Type: string (or Expression with - resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType + :vartype recursive: any + :ivar wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType + string). + :vartype wildcard_file_name: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype delete_files_after_completion: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. - :type use_binary_transfer: bool - :param disable_chunking: If true, disable parallel reading within each file. 
Default is false. + :vartype file_list_path: any + :ivar use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. + :vartype use_binary_transfer: bool + :ivar disable_chunking: If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_chunking: any + :vartype disable_chunking: any """ _validation = { @@ -16018,6 +22219,43 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or Expression with + resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType + string). + :paramtype wildcard_file_name: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. 
Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. + :paramtype use_binary_transfer: bool + :keyword disable_chunking: If true, disable parallel reading within each file. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_chunking: any + """ super(FtpReadSettings, self).__init__(**kwargs) self.type = 'FtpReadSettings' # type: str self.recursive = kwargs.get('recursive', None) @@ -16036,44 +22274,44 @@ class FtpServerLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. Host name of the FTP server. Type: string (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. 
+ :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. Host name of the FTP server. Type: string (or Expression with resultType string). - :type host: any - :param port: The TCP port number that the FTP server uses to listen for client connections. + :vartype host: any + :ivar port: The TCP port number that the FTP server uses to listen for client connections. Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: any - :param authentication_type: The authentication type to be used to connect to the FTP server. + :vartype port: any + :ivar authentication_type: The authentication type to be used to connect to the FTP server. Possible values include: "Basic", "Anonymous". - :type authentication_type: str or ~azure.mgmt.datafactory.models.FtpAuthenticationType - :param user_name: Username to logon the FTP server. Type: string (or Expression with resultType + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.FtpAuthenticationType + :ivar user_name: Username to logon the FTP server. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password to logon the FTP server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype user_name: any + :ivar password: Password to logon the FTP server. 
+ :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is + :vartype encrypted_credential: any + :ivar enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_ssl: any - :param enable_server_certificate_validation: If true, validate the FTP server SSL certificate + :vartype enable_ssl: any + :ivar enable_server_certificate_validation: If true, validate the FTP server SSL certificate when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: any + :vartype enable_server_certificate_validation: any """ _validation = { @@ -16102,6 +22340,44 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. Host name of the FTP server. Type: string (or Expression with + resultType string). 
+ :paramtype host: any + :keyword port: The TCP port number that the FTP server uses to listen for client connections. + Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. + :paramtype port: any + :keyword authentication_type: The authentication type to be used to connect to the FTP server. + Possible values include: "Basic", "Anonymous". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.FtpAuthenticationType + :keyword user_name: Username to logon the FTP server. Type: string (or Expression with + resultType string). + :paramtype user_name: any + :keyword password: Password to logon the FTP server. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is + true. Type: boolean (or Expression with resultType boolean). + :paramtype enable_ssl: any + :keyword enable_server_certificate_validation: If true, validate the FTP server SSL certificate + when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with + resultType boolean). + :paramtype enable_server_certificate_validation: any + """ super(FtpServerLinkedService, self).__init__(**kwargs) self.type = 'FtpServer' # type: str self.host = kwargs['host'] @@ -16119,17 +22395,17 @@ class FtpServerLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. 
Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any + :vartype file_name: any """ _validation = { @@ -16147,6 +22423,17 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + """ super(FtpServerLocation, self).__init__(**kwargs) self.type = 'FtpServerLocation' # type: str @@ -16154,11 +22441,11 @@ def __init__( class GetDataFactoryOperationStatusResponse(msrest.serialization.Model): """Response body structure for get data factory operation status. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param status: Status of the operation. - :type status: str + :vartype additional_properties: dict[str, any] + :ivar status: Status of the operation. 
+ :vartype status: str """ _attribute_map = { @@ -16170,6 +22457,13 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword status: Status of the operation. + :paramtype status: str + """ super(GetDataFactoryOperationStatusResponse, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.status = kwargs.get('status', None) @@ -16180,31 +22474,31 @@ class GetMetadataActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param dataset: Required. GetMetadata activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param field_list: Fields of metadata to get from dataset. - :type field_list: list[any] - :param store_settings: GetMetadata activity store settings. 
- :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: GetMetadata activity format settings. - :type format_settings: ~azure.mgmt.datafactory.models.FormatReadSettings + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar dataset: Required. GetMetadata activity dataset reference. + :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :ivar field_list: Fields of metadata to get from dataset. + :vartype field_list: list[any] + :ivar store_settings: GetMetadata activity store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar format_settings: GetMetadata activity format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.FormatReadSettings """ _validation = { @@ -16232,6 +22526,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. 
+ :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword dataset: Required. GetMetadata activity dataset reference. + :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :keyword field_list: Fields of metadata to get from dataset. + :paramtype field_list: list[any] + :keyword store_settings: GetMetadata activity store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword format_settings: GetMetadata activity format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.FormatReadSettings + """ super(GetMetadataActivity, self).__init__(**kwargs) self.type = 'GetMetadata' # type: str self.dataset = kwargs['dataset'] @@ -16243,8 +22562,8 @@ def __init__( class GetSsisObjectMetadataRequest(msrest.serialization.Model): """The request payload of get SSIS object metadata. - :param metadata_path: Metadata path. - :type metadata_path: str + :ivar metadata_path: Metadata path. + :vartype metadata_path: str """ _attribute_map = { @@ -16255,6 +22574,10 @@ def __init__( self, **kwargs ): + """ + :keyword metadata_path: Metadata path. + :paramtype metadata_path: str + """ super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) self.metadata_path = kwargs.get('metadata_path', None) @@ -16264,14 +22587,14 @@ class GitHubAccessTokenRequest(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param git_hub_access_code: Required. GitHub access code. - :type git_hub_access_code: str - :param git_hub_client_id: GitHub application client ID. 
- :type git_hub_client_id: str - :param git_hub_client_secret: GitHub bring your own app client secret information. - :type git_hub_client_secret: ~azure.mgmt.datafactory.models.GitHubClientSecret - :param git_hub_access_token_base_url: Required. GitHub access token base URL. - :type git_hub_access_token_base_url: str + :ivar git_hub_access_code: Required. GitHub access code. + :vartype git_hub_access_code: str + :ivar git_hub_client_id: GitHub application client ID. + :vartype git_hub_client_id: str + :ivar git_hub_client_secret: GitHub bring your own app client secret information. + :vartype git_hub_client_secret: ~azure.mgmt.datafactory.models.GitHubClientSecret + :ivar git_hub_access_token_base_url: Required. GitHub access token base URL. + :vartype git_hub_access_token_base_url: str """ _validation = { @@ -16290,6 +22613,16 @@ def __init__( self, **kwargs ): + """ + :keyword git_hub_access_code: Required. GitHub access code. + :paramtype git_hub_access_code: str + :keyword git_hub_client_id: GitHub application client ID. + :paramtype git_hub_client_id: str + :keyword git_hub_client_secret: GitHub bring your own app client secret information. + :paramtype git_hub_client_secret: ~azure.mgmt.datafactory.models.GitHubClientSecret + :keyword git_hub_access_token_base_url: Required. GitHub access token base URL. + :paramtype git_hub_access_token_base_url: str + """ super(GitHubAccessTokenRequest, self).__init__(**kwargs) self.git_hub_access_code = kwargs['git_hub_access_code'] self.git_hub_client_id = kwargs.get('git_hub_client_id', None) @@ -16300,8 +22633,8 @@ def __init__( class GitHubAccessTokenResponse(msrest.serialization.Model): """Get GitHub access token response definition. - :param git_hub_access_token: GitHub access token. - :type git_hub_access_token: str + :ivar git_hub_access_token: GitHub access token. 
+ :vartype git_hub_access_token: str """ _attribute_map = { @@ -16312,6 +22645,10 @@ def __init__( self, **kwargs ): + """ + :keyword git_hub_access_token: GitHub access token. + :paramtype git_hub_access_token: str + """ super(GitHubAccessTokenResponse, self).__init__(**kwargs) self.git_hub_access_token = kwargs.get('git_hub_access_token', None) @@ -16319,10 +22656,10 @@ def __init__( class GitHubClientSecret(msrest.serialization.Model): """Client secret information for factory's bring your own app repository configuration. - :param byoa_secret_akv_url: Bring your own app client secret AKV URL. - :type byoa_secret_akv_url: str - :param byoa_secret_name: Bring your own app client secret name in AKV. - :type byoa_secret_name: str + :ivar byoa_secret_akv_url: Bring your own app client secret AKV URL. + :vartype byoa_secret_akv_url: str + :ivar byoa_secret_name: Bring your own app client secret name in AKV. + :vartype byoa_secret_name: str """ _attribute_map = { @@ -16334,6 +22671,12 @@ def __init__( self, **kwargs ): + """ + :keyword byoa_secret_akv_url: Bring your own app client secret AKV URL. + :paramtype byoa_secret_akv_url: str + :keyword byoa_secret_name: Bring your own app client secret name in AKV. + :paramtype byoa_secret_name: str + """ super(GitHubClientSecret, self).__init__(**kwargs) self.byoa_secret_akv_url = kwargs.get('byoa_secret_akv_url', None) self.byoa_secret_name = kwargs.get('byoa_secret_name', None) @@ -16344,11 +22687,11 @@ class GlobalParameterSpecification(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Global Parameter type. Possible values include: "Object", "String", + :ivar type: Required. Global Parameter type. Possible values include: "Object", "String", "Int", "Float", "Bool", "Array". - :type type: str or ~azure.mgmt.datafactory.models.GlobalParameterType - :param value: Required. Value of parameter. 
- :type value: any + :vartype type: str or ~azure.mgmt.datafactory.models.GlobalParameterType + :ivar value: Required. Value of parameter. + :vartype value: any """ _validation = { @@ -16365,6 +22708,13 @@ def __init__( self, **kwargs ): + """ + :keyword type: Required. Global Parameter type. Possible values include: "Object", "String", + "Int", "Float", "Bool", "Array". + :paramtype type: str or ~azure.mgmt.datafactory.models.GlobalParameterType + :keyword value: Required. Value of parameter. + :paramtype value: any + """ super(GlobalParameterSpecification, self).__init__(**kwargs) self.type = kwargs['type'] self.value = kwargs['value'] @@ -16375,56 +22725,56 @@ class GoogleAdWordsLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param client_customer_id: Required. The Client customer ID of the AdWords account that you - want to fetch report data for. - :type client_customer_id: any - :param developer_token: Required. The developer token associated with the manager account that + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. 
+ :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar client_customer_id: Required. The Client customer ID of the AdWords account that you want + to fetch report data for. + :vartype client_customer_id: any + :ivar developer_token: Required. The developer token associated with the manager account that you use to grant access to the AdWords API. - :type developer_token: ~azure.mgmt.datafactory.models.SecretBase - :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for + :vartype developer_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar authentication_type: Required. The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values include: "ServiceAuthentication", "UserAuthentication". - :type authentication_type: str or + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType - :param refresh_token: The refresh token obtained from Google for authorizing access to AdWords + :ivar refresh_token: The refresh token obtained from Google for authorizing access to AdWords for UserAuthentication. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param client_id: The client id of the google application used to acquire the refresh token. + :vartype refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar client_id: The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). 
- :type client_id: any - :param client_secret: The client secret of the google application used to acquire the refresh + :vartype client_id: any + :ivar client_secret: The client secret of the google application used to acquire the refresh token. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param email: The service account email ID that is used for ServiceAuthentication and can only + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar email: The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. - :type email: any - :param key_file_path: The full path to the .p12 key file that is used to authenticate the + :vartype email: any + :ivar key_file_path: The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. - :type key_file_path: any - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + :vartype key_file_path: any + :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: any - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + :vartype trusted_cert_path: any + :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_system_trust_store: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -16458,6 +22808,56 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword client_customer_id: Required. The Client customer ID of the AdWords account that you + want to fetch report data for. + :paramtype client_customer_id: any + :keyword developer_token: Required. The developer token associated with the manager account + that you use to grant access to the AdWords API. + :paramtype developer_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword authentication_type: Required. The OAuth 2.0 authentication mechanism used for + authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values + include: "ServiceAuthentication", "UserAuthentication". + :paramtype authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType + :keyword refresh_token: The refresh token obtained from Google for authorizing access to + AdWords for UserAuthentication. + :paramtype refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword client_id: The client id of the google application used to acquire the refresh token. + Type: string (or Expression with resultType string). 
+ :paramtype client_id: any + :keyword client_secret: The client secret of the google application used to acquire the refresh + token. + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword email: The service account email ID that is used for ServiceAuthentication and can + only be used on self-hosted IR. + :paramtype email: any + :keyword key_file_path: The full path to the .p12 key file that is used to authenticate the + service account email address and can only be used on self-hosted IR. + :paramtype key_file_path: any + :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates + for verifying the server when connecting over SSL. This property can only be set when using SSL + on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :paramtype trusted_cert_path: any + :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system + trust store or from a specified PEM file. The default value is false. + :paramtype use_system_trust_store: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(GoogleAdWordsLinkedService, self).__init__(**kwargs) self.type = 'GoogleAdWords' # type: str self.client_customer_id = kwargs['client_customer_id'] @@ -16478,30 +22878,30 @@ class GoogleAdWordsObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. 
- :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. 
Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -16526,6 +22926,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(GoogleAdWordsObjectDataset, self).__init__(**kwargs) self.type = 'GoogleAdWordsObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -16536,32 +22960,32 @@ class GoogleAdWordsSource(TabularSource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -16584,6 +23008,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(GoogleAdWordsSource, self).__init__(**kwargs) self.type = 'GoogleAdWordsSource' # type: str self.query = kwargs.get('query', None) @@ -16594,58 +23044,58 @@ class GoogleBigQueryLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param project: Required. The default BigQuery project to query against. - :type project: any - :param additional_projects: A comma-separated list of public BigQuery projects to access. - :type additional_projects: any - :param request_google_drive_scope: Whether to request access to Google Drive. Allowing Google + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. 
+ :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar project: Required. The default BigQuery project to query against. + :vartype project: any + :ivar additional_projects: A comma-separated list of public BigQuery projects to access. + :vartype additional_projects: any + :ivar request_google_drive_scope: Whether to request access to Google Drive. Allowing Google Drive access enables support for federated tables that combine BigQuery data with data from Google Drive. The default value is false. - :type request_google_drive_scope: any - :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for + :vartype request_google_drive_scope: any + :ivar authentication_type: Required. The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values include: "ServiceAuthentication", "UserAuthentication". - :type authentication_type: str or + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType - :param refresh_token: The refresh token obtained from Google for authorizing access to BigQuery + :ivar refresh_token: The refresh token obtained from Google for authorizing access to BigQuery for UserAuthentication. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param client_id: The client id of the google application used to acquire the refresh token. + :vartype refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar client_id: The client id of the google application used to acquire the refresh token. 
Type: string (or Expression with resultType string). - :type client_id: any - :param client_secret: The client secret of the google application used to acquire the refresh + :vartype client_id: any + :ivar client_secret: The client secret of the google application used to acquire the refresh token. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param email: The service account email ID that is used for ServiceAuthentication and can only + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar email: The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. - :type email: any - :param key_file_path: The full path to the .p12 key file that is used to authenticate the + :vartype email: any + :ivar key_file_path: The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. - :type key_file_path: any - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + :vartype key_file_path: any + :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: any - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + :vartype trusted_cert_path: any + :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_system_trust_store: any + :ivar encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -16679,6 +23129,58 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword project: Required. The default BigQuery project to query against. + :paramtype project: any + :keyword additional_projects: A comma-separated list of public BigQuery projects to access. + :paramtype additional_projects: any + :keyword request_google_drive_scope: Whether to request access to Google Drive. Allowing Google + Drive access enables support for federated tables that combine BigQuery data with data from + Google Drive. The default value is false. + :paramtype request_google_drive_scope: any + :keyword authentication_type: Required. The OAuth 2.0 authentication mechanism used for + authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values + include: "ServiceAuthentication", "UserAuthentication". + :paramtype authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType + :keyword refresh_token: The refresh token obtained from Google for authorizing access to + BigQuery for UserAuthentication. 
+ :paramtype refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword client_id: The client id of the google application used to acquire the refresh token. + Type: string (or Expression with resultType string). + :paramtype client_id: any + :keyword client_secret: The client secret of the google application used to acquire the refresh + token. + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword email: The service account email ID that is used for ServiceAuthentication and can + only be used on self-hosted IR. + :paramtype email: any + :keyword key_file_path: The full path to the .p12 key file that is used to authenticate the + service account email address and can only be used on self-hosted IR. + :paramtype key_file_path: any + :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates + for verifying the server when connecting over SSL. This property can only be set when using SSL + on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :paramtype trusted_cert_path: any + :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system + trust store or from a specified PEM file. The default value is false. + :paramtype use_system_trust_store: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(GoogleBigQueryLinkedService, self).__init__(**kwargs) self.type = 'GoogleBigQuery' # type: str self.project = kwargs['project'] @@ -16700,37 +23202,37 @@ class GoogleBigQueryObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using database + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using database + table properties instead. - :type table_name: any - :param table: The table name of the Google BigQuery. Type: string (or Expression with - resultType string). - :type table: any - :param dataset: The database name of the Google BigQuery. Type: string (or Expression with + :vartype table_name: any + :ivar table: The table name of the Google BigQuery. Type: string (or Expression with resultType + string). + :vartype table: any + :ivar dataset: The database name of the Google BigQuery. Type: string (or Expression with resultType string). - :type dataset: any + :vartype dataset: any """ _validation = { @@ -16757,6 +23259,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using database + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of the Google BigQuery. Type: string (or Expression with + resultType string). + :paramtype table: any + :keyword dataset: The database name of the Google BigQuery. Type: string (or Expression with + resultType string). + :paramtype dataset: any + """ super(GoogleBigQueryObjectDataset, self).__init__(**kwargs) self.type = 'GoogleBigQueryObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -16769,32 +23302,32 @@ class GoogleBigQuerySource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: any + :vartype query: any """ _validation = { @@ -16817,6 +23350,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(GoogleBigQuerySource, self).__init__(**kwargs) self.type = 'GoogleBigQuerySource' # type: str self.query = kwargs.get('query', None) @@ -16827,34 +23386,34 @@ class GoogleCloudStorageLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param access_key_id: The access key identifier of the Google Cloud Storage Identity and Access + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar access_key_id: The access key identifier of the Google Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: any - :param secret_access_key: The secret access key of the Google Cloud Storage Identity and Access + :vartype access_key_id: any + :ivar secret_access_key: The secret access key of the Google Cloud Storage Identity and Access Management (IAM) user. 
- :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_url: This value specifies the endpoint to access with the Google Cloud Storage + :vartype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_url: This value specifies the endpoint to access with the Google Cloud Storage Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :type service_url: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_url: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -16878,6 +23437,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword access_key_id: The access key identifier of the Google Cloud Storage Identity and + Access Management (IAM) user. Type: string (or Expression with resultType string). 
+ :paramtype access_key_id: any + :keyword secret_access_key: The secret access key of the Google Cloud Storage Identity and + Access Management (IAM) user. + :paramtype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_url: This value specifies the endpoint to access with the Google Cloud Storage + Connector. This is an optional property; change it only if you want to try a different service + endpoint or want to switch between https and http. Type: string (or Expression with resultType + string). + :paramtype service_url: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(GoogleCloudStorageLinkedService, self).__init__(**kwargs) self.type = 'GoogleCloudStorage' # type: str self.access_key_id = kwargs.get('access_key_id', None) @@ -16891,23 +23478,23 @@ class GoogleCloudStorageLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. 
Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any - :param bucket_name: Specify the bucketName of Google Cloud Storage. Type: string (or Expression + :vartype file_name: any + :ivar bucket_name: Specify the bucketName of Google Cloud Storage. Type: string (or Expression with resultType string). - :type bucket_name: any - :param version: Specify the version of Google Cloud Storage. Type: string (or Expression with + :vartype bucket_name: any + :ivar version: Specify the version of Google Cloud Storage. Type: string (or Expression with resultType string). - :type version: any + :vartype version: any """ _validation = { @@ -16927,6 +23514,23 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + :keyword bucket_name: Specify the bucketName of Google Cloud Storage. Type: string (or + Expression with resultType string). + :paramtype bucket_name: any + :keyword version: Specify the version of Google Cloud Storage. Type: string (or Expression with + resultType string). + :paramtype version: any + """ super(GoogleCloudStorageLocation, self).__init__(**kwargs) self.type = 'GoogleCloudStorageLocation' # type: str self.bucket_name = kwargs.get('bucket_name', None) @@ -16938,47 +23542,47 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: Google Cloud Storage wildcardFolderPath. Type: string (or + :vartype recursive: any + :ivar wildcard_folder_path: Google Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: Google Cloud Storage wildcardFileName. Type: string (or Expression + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: Google Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). 
- :type wildcard_file_name: any - :param prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or + :vartype wildcard_file_name: any + :ivar prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). - :type prefix: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype prefix: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype file_list_path: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. 
Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any + :vartype modified_datetime_end: any """ _validation = { @@ -17006,6 +23610,47 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: Google Cloud Storage wildcardFolderPath. Type: string (or + Expression with resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: Google Cloud Storage wildcardFileName. Type: string (or Expression + with resultType string). + :paramtype wildcard_file_name: any + :keyword prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or + Expression with resultType string). + :paramtype prefix: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. 
+ :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + """ super(GoogleCloudStorageReadSettings, self).__init__(**kwargs) self.type = 'GoogleCloudStorageReadSettings' # type: str self.recursive = kwargs.get('recursive', None) @@ -17025,28 +23670,28 @@ class GreenplumLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: An ODBC connection string. 
Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar pwd: The Azure key vault secret reference of password in connection string. + :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -17069,6 +23714,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. 
+ :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword pwd: The Azure key vault secret reference of password in connection string. + :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(GreenplumLinkedService, self).__init__(**kwargs) self.type = 'Greenplum' # type: str self.connection_string = kwargs.get('connection_string', None) @@ -17081,32 +23748,32 @@ class GreenplumSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: any + :vartype query: any """ _validation = { @@ -17129,6 +23796,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(GreenplumSource, self).__init__(**kwargs) self.type = 'GreenplumSource' # type: str self.query = kwargs.get('query', None) @@ -17139,36 +23832,36 @@ class GreenplumTableDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The table name of Greenplum. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The schema name of Greenplum. Type: string (or Expression + :vartype table_name: any + :ivar table: The table name of Greenplum. Type: string (or Expression with resultType string). + :vartype table: any + :ivar schema_type_properties_schema: The schema name of Greenplum. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -17195,6 +23888,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of Greenplum. Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of Greenplum. Type: string (or + Expression with resultType string). + :paramtype schema_type_properties_schema: any + """ super(GreenplumTableDataset, self).__init__(**kwargs) self.type = 'GreenplumTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -17207,51 +23931,51 @@ class HBaseLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The IP address or host name of the HBase server. (i.e. 192.168.222.160). - :type host: any - :param port: The TCP port that the HBase instance uses to listen for client connections. The + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The IP address or host name of the HBase server. (i.e. 192.168.222.160). + :vartype host: any + :ivar port: The TCP port that the HBase instance uses to listen for client connections. The default value is 9090. - :type port: any - :param http_path: The partial URL corresponding to the HBase server. (i.e. + :vartype port: any + :ivar http_path: The partial URL corresponding to the HBase server. (i.e. /gateway/sandbox/hbase/version). - :type http_path: any - :param authentication_type: Required. The authentication mechanism to use to connect to the + :vartype http_path: any + :ivar authentication_type: Required. The authentication mechanism to use to connect to the HBase server. Possible values include: "Anonymous", "Basic". - :type authentication_type: str or ~azure.mgmt.datafactory.models.HBaseAuthenticationType - :param username: The user name used to connect to the HBase instance. 
- :type username: any - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.HBaseAuthenticationType + :ivar username: The user name used to connect to the HBase instance. + :vartype username: any + :ivar password: The password corresponding to the user name. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :type enable_ssl: any - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + :vartype enable_ssl: any + :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: any - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + :vartype trusted_cert_path: any + :ivar allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: any - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + :vartype allow_host_name_cn_mismatch: any + :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: any - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are + :vartype allow_self_signed_server_cert: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -17284,6 +24008,52 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The IP address or host name of the HBase server. (i.e. + 192.168.222.160). + :paramtype host: any + :keyword port: The TCP port that the HBase instance uses to listen for client connections. The + default value is 9090. + :paramtype port: any + :keyword http_path: The partial URL corresponding to the HBase server. (i.e. + /gateway/sandbox/hbase/version). + :paramtype http_path: any + :keyword authentication_type: Required. The authentication mechanism to use to connect to the + HBase server. Possible values include: "Anonymous", "Basic". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.HBaseAuthenticationType + :keyword username: The user name used to connect to the HBase instance. + :paramtype username: any + :keyword password: The password corresponding to the user name. 
+ :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. + The default value is false. + :paramtype enable_ssl: any + :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates + for verifying the server when connecting over SSL. This property can only be set when using SSL + on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :paramtype trusted_cert_path: any + :keyword allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :paramtype allow_host_name_cn_mismatch: any + :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates + from the server. The default value is false. + :paramtype allow_self_signed_server_cert: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(HBaseLinkedService, self).__init__(**kwargs) self.type = 'HBase' # type: str self.host = kwargs['host'] @@ -17304,30 +24074,30 @@ class HBaseObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -17352,6 +24122,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(HBaseObjectDataset, self).__init__(**kwargs) self.type = 'HBaseObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -17362,32 +24156,32 @@ class HBaseSource(TabularSource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -17410,6 +24204,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(HBaseSource, self).__init__(**kwargs) self.type = 'HBaseSource' # type: str self.query = kwargs.get('query', None) @@ -17420,34 +24240,34 @@ class HdfsLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param url: Required. The URL of the HDFS service endpoint, e.g. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. 
+ :vartype annotations: list[any] + :ivar url: Required. The URL of the HDFS service endpoint, e.g. http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). - :type url: any - :param authentication_type: Type of authentication used to connect to the HDFS. Possible values + :vartype url: any + :ivar authentication_type: Type of authentication used to connect to the HDFS. Possible values are: Anonymous and Windows. Type: string (or Expression with resultType string). - :type authentication_type: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype authentication_type: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param user_name: User name for Windows authentication. Type: string (or Expression with + :vartype encrypted_credential: any + :ivar user_name: User name for Windows authentication. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password for Windows authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase + :vartype user_name: any + :ivar password: Password for Windows authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -17473,6 +24293,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword url: Required. The URL of the HDFS service endpoint, e.g. + http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). + :paramtype url: any + :keyword authentication_type: Type of authentication used to connect to the HDFS. Possible + values are: Anonymous and Windows. Type: string (or Expression with resultType string). + :paramtype authentication_type: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword user_name: User name for Windows authentication. Type: string (or Expression with + resultType string). + :paramtype user_name: any + :keyword password: Password for Windows authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + """ super(HdfsLinkedService, self).__init__(**kwargs) self.type = 'Hdfs' # type: str self.url = kwargs['url'] @@ -17487,17 +24335,17 @@ class HdfsLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. 
+ :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any + :vartype file_name: any """ _validation = { @@ -17515,6 +24363,17 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + """ super(HdfsLocation, self).__init__(**kwargs) self.type = 'HdfsLocation' # type: str @@ -17524,46 +24383,46 @@ class HdfsReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or Expression with + :vartype recursive: any + :ivar wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype wildcard_file_name: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype file_list_path: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. 
+ :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype partition_root_path: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any - :param distcp_settings: Specifies Distcp-related settings. - :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype modified_datetime_end: any + :ivar distcp_settings: Specifies Distcp-related settings. + :vartype distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any + :vartype delete_files_after_completion: any """ _validation = { @@ -17591,6 +24450,46 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or Expression with + resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType + string). + :paramtype wildcard_file_name: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + :keyword distcp_settings: Specifies Distcp-related settings. + :paramtype distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype delete_files_after_completion: any + """ super(HdfsReadSettings, self).__init__(**kwargs) self.type = 'HdfsReadSettings' # type: str self.recursive = kwargs.get('recursive', None) @@ -17610,28 +24509,28 @@ class HdfsSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param distcp_settings: Specifies Distcp-related settings. - :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + :vartype recursive: any + :ivar distcp_settings: Specifies Distcp-related settings. + :vartype distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings """ _validation = { @@ -17653,6 +24552,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword distcp_settings: Specifies Distcp-related settings. 
+ :paramtype distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ super(HdfsSource, self).__init__(**kwargs) self.type = 'HdfsSource' # type: str self.recursive = kwargs.get('recursive', None) @@ -17664,40 +24585,40 @@ class HDInsightHiveActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[any] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with resultType string). - :type script_path: any - :param script_linked_service: Script linked service reference. - :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param defines: Allows user to specify defines for Hive job request. 
- :type defines: dict[str, any] - :param variables: User specified arguments under hivevar namespace. - :type variables: list[any] - :param query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster - is with ESP (Enterprise Security Package). - :type query_timeout: int + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar storage_linked_services: Storage linked service references. + :vartype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :ivar arguments: User specified arguments to HDInsightActivity. + :vartype arguments: list[any] + :ivar get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :vartype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :ivar script_path: Script path. Type: string (or Expression with resultType string). + :vartype script_path: any + :ivar script_linked_service: Script linked service reference. + :vartype script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar defines: Allows user to specify defines for Hive job request. 
+ :vartype defines: dict[str, any] + :ivar variables: User specified arguments under hivevar namespace. + :vartype variables: list[any] + :ivar query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster is + with ESP (Enterprise Security Package). + :vartype query_timeout: int """ _validation = { @@ -17728,6 +24649,42 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword storage_linked_services: Storage linked service references. + :paramtype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :keyword arguments: User specified arguments to HDInsightActivity. + :paramtype arguments: list[any] + :keyword get_debug_info: Debug info option. Possible values include: "None", "Always", + "Failure". + :paramtype get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :keyword script_path: Script path. Type: string (or Expression with resultType string). + :paramtype script_path: any + :keyword script_linked_service: Script linked service reference. 
+ :paramtype script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword defines: Allows user to specify defines for Hive job request. + :paramtype defines: dict[str, any] + :keyword variables: User specified arguments under hivevar namespace. + :paramtype variables: list[any] + :keyword query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster + is with ESP (Enterprise Security Package). + :paramtype query_timeout: int + """ super(HDInsightHiveActivity, self).__init__(**kwargs) self.type = 'HDInsightHive' # type: str self.storage_linked_services = kwargs.get('storage_linked_services', None) @@ -17745,42 +24702,42 @@ class HDInsightLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with - resultType string). - :type cluster_uri: any - :param user_name: HDInsight cluster user name. Type: string (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. 
+ :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with resultType string). - :type user_name: any - :param password: HDInsight cluster password. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param linked_service_name: The Azure Storage linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param hcatalog_linked_service_name: A reference to the Azure SQL linked service that points to + :vartype cluster_uri: any + :ivar user_name: HDInsight cluster user name. Type: string (or Expression with resultType + string). + :vartype user_name: any + :ivar password: HDInsight cluster password. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar linked_service_name: The Azure Storage linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar hcatalog_linked_service_name: A reference to the Azure SQL linked service that points to the HCatalog database. - :type hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security + :vartype encrypted_credential: any + :ivar is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. - :type is_esp_enabled: any - :param file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. + :vartype is_esp_enabled: any + :ivar file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. Type: string (or Expression with resultType string). - :type file_system: any + :vartype file_system: any """ _validation = { @@ -17809,6 +24766,42 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with + resultType string). + :paramtype cluster_uri: any + :keyword user_name: HDInsight cluster user name. Type: string (or Expression with resultType + string). + :paramtype user_name: any + :keyword password: HDInsight cluster password. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword linked_service_name: The Azure Storage linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword hcatalog_linked_service_name: A reference to the Azure SQL linked service that points + to the HCatalog database. + :paramtype hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security + Package). Type: Boolean. + :paramtype is_esp_enabled: any + :keyword file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS + Gen2. Type: string (or Expression with resultType string). + :paramtype file_system: any + """ super(HDInsightLinkedService, self).__init__(**kwargs) self.type = 'HDInsight' # type: str self.cluster_uri = kwargs['cluster_uri'] @@ -17826,39 +24819,39 @@ class HDInsightMapReduceActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. 
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[any] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param class_name: Required. Class name. Type: string (or Expression with resultType string). - :type class_name: any - :param jar_file_path: Required. Jar path. Type: string (or Expression with resultType string). - :type jar_file_path: any - :param jar_linked_service: Jar linked service reference. - :type jar_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param jar_libs: Jar libs. - :type jar_libs: list[any] - :param defines: Allows user to specify defines for the MapReduce job request. - :type defines: dict[str, any] + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. 
+ :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar storage_linked_services: Storage linked service references. + :vartype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :ivar arguments: User specified arguments to HDInsightActivity. + :vartype arguments: list[any] + :ivar get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :vartype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :ivar class_name: Required. Class name. Type: string (or Expression with resultType string). + :vartype class_name: any + :ivar jar_file_path: Required. Jar path. Type: string (or Expression with resultType string). + :vartype jar_file_path: any + :ivar jar_linked_service: Jar linked service reference. + :vartype jar_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar jar_libs: Jar libs. + :vartype jar_libs: list[any] + :ivar defines: Allows user to specify defines for the MapReduce job request. + :vartype defines: dict[str, any] """ _validation = { @@ -17891,6 +24884,42 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. 
+ :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword storage_linked_services: Storage linked service references. + :paramtype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :keyword arguments: User specified arguments to HDInsightActivity. + :paramtype arguments: list[any] + :keyword get_debug_info: Debug info option. Possible values include: "None", "Always", + "Failure". + :paramtype get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :keyword class_name: Required. Class name. Type: string (or Expression with resultType string). + :paramtype class_name: any + :keyword jar_file_path: Required. Jar path. Type: string (or Expression with resultType + string). + :paramtype jar_file_path: any + :keyword jar_linked_service: Jar linked service reference. + :paramtype jar_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword jar_libs: Jar libs. + :paramtype jar_libs: list[any] + :keyword defines: Allows user to specify defines for the MapReduce job request. + :paramtype defines: dict[str, any] + """ super(HDInsightMapReduceActivity, self).__init__(**kwargs) self.type = 'HDInsightMapReduce' # type: str self.storage_linked_services = kwargs.get('storage_linked_services', None) @@ -17908,120 +24937,119 @@ class HDInsightOnDemandLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. Type: string (or Expression with resultType string). - :type cluster_size: any - :param time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. + :vartype cluster_size: any + :ivar time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string (or Expression with resultType string). - :type time_to_live: any - :param version: Required. Version of the HDInsight cluster.  Type: string (or Expression with + :vartype time_to_live: any + :ivar version: Required. Version of the HDInsight cluster.  Type: string (or Expression with resultType string). - :type version: any - :param linked_service_name: Required. 
Azure Storage linked service to be used by the on-demand + :vartype version: any + :ivar linked_service_name: Required. Azure Storage linked service to be used by the on-demand cluster for storing and processing data. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param host_subscription_id: Required. The customer’s subscription to host the cluster. Type: + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar host_subscription_id: Required. The customer’s subscription to host the cluster. Type: string (or Expression with resultType string). - :type host_subscription_id: any - :param service_principal_id: The service principal id for the hostSubscriptionId. Type: string + :vartype host_subscription_id: any + :ivar service_principal_id: The service principal id for the hostSubscriptionId. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The key for the service principal id. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. The Tenant id/name to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param cluster_resource_group: Required. The resource group where the cluster belongs. Type: + :vartype service_principal_id: any + :ivar service_principal_key: The key for the service principal id. + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: Required. The Tenant id/name to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar cluster_resource_group: Required. The resource group where the cluster belongs. Type: string (or Expression with resultType string). - :type cluster_resource_group: any - :param cluster_name_prefix: The prefix of cluster name, postfix will be distinct with - timestamp. 
Type: string (or Expression with resultType string). - :type cluster_name_prefix: any - :param cluster_user_name: The username to access the cluster. Type: string (or Expression with + :vartype cluster_resource_group: any + :ivar cluster_name_prefix: The prefix of cluster name, postfix will be distinct with timestamp. + Type: string (or Expression with resultType string). + :vartype cluster_name_prefix: any + :ivar cluster_user_name: The username to access the cluster. Type: string (or Expression with resultType string). - :type cluster_user_name: any - :param cluster_password: The password to access the cluster. - :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase - :param cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for + :vartype cluster_user_name: any + :ivar cluster_password: The password to access the cluster. + :vartype cluster_password: ~azure.mgmt.datafactory.models.SecretBase + :ivar cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for Linux). Type: string (or Expression with resultType string). - :type cluster_ssh_user_name: any - :param cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). - :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase - :param additional_linked_service_names: Specifies additional storage accounts for the HDInsight + :vartype cluster_ssh_user_name: any + :ivar cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). + :vartype cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase + :ivar additional_linked_service_names: Specifies additional storage accounts for the HDInsight linked service so that the Data Factory service can register them on your behalf. 
- :type additional_linked_service_names: + :vartype additional_linked_service_names: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param hcatalog_linked_service_name: The name of Azure SQL linked service that point to the + :ivar hcatalog_linked_service_name: The name of Azure SQL linked service that point to the HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database as the metastore. - :type hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param cluster_type: The cluster type. Type: string (or Expression with resultType string). - :type cluster_type: any - :param spark_version: The version of spark if the cluster type is 'spark'. Type: string (or + :vartype hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar cluster_type: The cluster type. Type: string (or Expression with resultType string). + :vartype cluster_type: any + :ivar spark_version: The version of spark if the cluster type is 'spark'. Type: string (or Expression with resultType string). - :type spark_version: any - :param core_configuration: Specifies the core configuration parameters (as in core-site.xml) - for the HDInsight cluster to be created. - :type core_configuration: any - :param h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) for + :vartype spark_version: any + :ivar core_configuration: Specifies the core configuration parameters (as in core-site.xml) for + the HDInsight cluster to be created. + :vartype core_configuration: any + :ivar h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) for the HDInsight cluster. - :type h_base_configuration: any - :param hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for the + :vartype h_base_configuration: any + :ivar hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for the HDInsight cluster. 
- :type hdfs_configuration: any - :param hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for the + :vartype hdfs_configuration: any + :ivar hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for the HDInsight cluster. - :type hive_configuration: any - :param map_reduce_configuration: Specifies the MapReduce configuration parameters + :vartype hive_configuration: any + :ivar map_reduce_configuration: Specifies the MapReduce configuration parameters (mapred-site.xml) for the HDInsight cluster. - :type map_reduce_configuration: any - :param oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for + :vartype map_reduce_configuration: any + :ivar oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for the HDInsight cluster. - :type oozie_configuration: any - :param storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for + :vartype oozie_configuration: any + :ivar storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for the HDInsight cluster. - :type storm_configuration: any - :param yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for the + :vartype storm_configuration: any + :ivar yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for the HDInsight cluster. - :type yarn_configuration: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype yarn_configuration: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param head_node_size: Specifies the size of the head node for the HDInsight cluster. 
- :type head_node_size: any - :param data_node_size: Specifies the size of the data node for the HDInsight cluster. - :type data_node_size: any - :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for the HDInsight - cluster. - :type zookeeper_node_size: any - :param script_actions: Custom script actions to run on HDI ondemand cluster once it's up. - Please refer to + :vartype encrypted_credential: any + :ivar head_node_size: Specifies the size of the head node for the HDInsight cluster. + :vartype head_node_size: any + :ivar data_node_size: Specifies the size of the data node for the HDInsight cluster. + :vartype data_node_size: any + :ivar zookeeper_node_size: Specifies the size of the Zoo Keeper node for the HDInsight cluster. + :vartype zookeeper_node_size: any + :ivar script_actions: Custom script actions to run on HDI ondemand cluster once it's up. Please + refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. - :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] - :param virtual_network_id: The ARM resource ID for the vNet to which the cluster should be + :vartype script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] + :ivar virtual_network_id: The ARM resource ID for the vNet to which the cluster should be joined after creation. Type: string (or Expression with resultType string). - :type virtual_network_id: any - :param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was + :vartype virtual_network_id: any + :ivar subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is required. Type: string (or Expression with resultType string). - :type subnet_name: any - :param credential: The credential reference containing authentication information. 
- :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype subnet_name: any + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -18082,6 +25110,120 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: + 4. Type: string (or Expression with resultType string). + :paramtype cluster_size: any + :keyword time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. + Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity + run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string + (or Expression with resultType string). + :paramtype time_to_live: any + :keyword version: Required. Version of the HDInsight cluster.  Type: string (or Expression with + resultType string). + :paramtype version: any + :keyword linked_service_name: Required. Azure Storage linked service to be used by the + on-demand cluster for storing and processing data. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword host_subscription_id: Required. 
The customer’s subscription to host the cluster. Type: + string (or Expression with resultType string). + :paramtype host_subscription_id: any + :keyword service_principal_id: The service principal id for the hostSubscriptionId. Type: + string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The key for the service principal id. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: Required. The Tenant id/name to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword cluster_resource_group: Required. The resource group where the cluster belongs. Type: + string (or Expression with resultType string). + :paramtype cluster_resource_group: any + :keyword cluster_name_prefix: The prefix of cluster name, postfix will be distinct with + timestamp. Type: string (or Expression with resultType string). + :paramtype cluster_name_prefix: any + :keyword cluster_user_name: The username to access the cluster. Type: string (or Expression + with resultType string). + :paramtype cluster_user_name: any + :keyword cluster_password: The password to access the cluster. + :paramtype cluster_password: ~azure.mgmt.datafactory.models.SecretBase + :keyword cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for + Linux). Type: string (or Expression with resultType string). + :paramtype cluster_ssh_user_name: any + :keyword cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). + :paramtype cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase + :keyword additional_linked_service_names: Specifies additional storage accounts for the + HDInsight linked service so that the Data Factory service can register them on your behalf. 
+ :paramtype additional_linked_service_names: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :keyword hcatalog_linked_service_name: The name of Azure SQL linked service that point to the + HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database + as the metastore. + :paramtype hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword cluster_type: The cluster type. Type: string (or Expression with resultType string). + :paramtype cluster_type: any + :keyword spark_version: The version of spark if the cluster type is 'spark'. Type: string (or + Expression with resultType string). + :paramtype spark_version: any + :keyword core_configuration: Specifies the core configuration parameters (as in core-site.xml) + for the HDInsight cluster to be created. + :paramtype core_configuration: any + :keyword h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) + for the HDInsight cluster. + :paramtype h_base_configuration: any + :keyword hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for + the HDInsight cluster. + :paramtype hdfs_configuration: any + :keyword hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for + the HDInsight cluster. + :paramtype hive_configuration: any + :keyword map_reduce_configuration: Specifies the MapReduce configuration parameters + (mapred-site.xml) for the HDInsight cluster. + :paramtype map_reduce_configuration: any + :keyword oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for + the HDInsight cluster. + :paramtype oozie_configuration: any + :keyword storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for + the HDInsight cluster. + :paramtype storm_configuration: any + :keyword yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for + the HDInsight cluster. 
+ :paramtype yarn_configuration: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword head_node_size: Specifies the size of the head node for the HDInsight cluster. + :paramtype head_node_size: any + :keyword data_node_size: Specifies the size of the data node for the HDInsight cluster. + :paramtype data_node_size: any + :keyword zookeeper_node_size: Specifies the size of the Zoo Keeper node for the HDInsight + cluster. + :paramtype zookeeper_node_size: any + :keyword script_actions: Custom script actions to run on HDI ondemand cluster once it's up. + Please refer to + https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. + :paramtype script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] + :keyword virtual_network_id: The ARM resource ID for the vNet to which the cluster should be + joined after creation. Type: string (or Expression with resultType string). + :paramtype virtual_network_id: any + :keyword subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was + specified, then this property is required. Type: string (or Expression with resultType string). + :paramtype subnet_name: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(HDInsightOnDemandLinkedService, self).__init__(**kwargs) self.type = 'HDInsightOnDemand' # type: str self.cluster_size = kwargs['cluster_size'] @@ -18125,36 +25267,36 @@ class HDInsightPigActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. Type: array (or Expression - with resultType array). - :type arguments: any - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with resultType string). - :type script_path: any - :param script_linked_service: Script linked service reference. - :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param defines: Allows user to specify defines for Pig job request. - :type defines: dict[str, any] + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. 
Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar storage_linked_services: Storage linked service references. + :vartype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :ivar arguments: User specified arguments to HDInsightActivity. Type: array (or Expression with + resultType array). + :vartype arguments: any + :ivar get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :vartype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :ivar script_path: Script path. Type: string (or Expression with resultType string). + :vartype script_path: any + :ivar script_linked_service: Script linked service reference. + :vartype script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar defines: Allows user to specify defines for Pig job request. + :vartype defines: dict[str, any] """ _validation = { @@ -18183,6 +25325,38 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. 
+ :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword storage_linked_services: Storage linked service references. + :paramtype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :keyword arguments: User specified arguments to HDInsightActivity. Type: array (or Expression + with resultType array). + :paramtype arguments: any + :keyword get_debug_info: Debug info option. Possible values include: "None", "Always", + "Failure". + :paramtype get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :keyword script_path: Script path. Type: string (or Expression with resultType string). + :paramtype script_path: any + :keyword script_linked_service: Script linked service reference. + :paramtype script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword defines: Allows user to specify defines for Pig job request. + :paramtype defines: dict[str, any] + """ super(HDInsightPigActivity, self).__init__(**kwargs) self.type = 'HDInsightPig' # type: str self.storage_linked_services = kwargs.get('storage_linked_services', None) @@ -18198,43 +25372,43 @@ class HDInsightSparkActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. 
- :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. Type: string (or Expression with resultType string). - :type root_path: any - :param entry_file_path: Required. The relative path to the root folder of the code/package to - be executed. Type: string (or Expression with resultType string). - :type entry_file_path: any - :param arguments: The user-specified arguments to HDInsightSparkActivity. 
- :type arguments: list[any] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param spark_job_linked_service: The storage linked service for uploading the entry file and + :vartype root_path: any + :ivar entry_file_path: Required. The relative path to the root folder of the code/package to be + executed. Type: string (or Expression with resultType string). + :vartype entry_file_path: any + :ivar arguments: The user-specified arguments to HDInsightSparkActivity. + :vartype arguments: list[any] + :ivar get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :vartype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :ivar spark_job_linked_service: The storage linked service for uploading the entry file and dependencies, and for receiving logs. - :type spark_job_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param class_name: The application's Java/Spark main class. - :type class_name: str - :param proxy_user: The user to impersonate that will execute the job. Type: string (or + :vartype spark_job_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar class_name: The application's Java/Spark main class. + :vartype class_name: str + :ivar proxy_user: The user to impersonate that will execute the job. Type: string (or Expression with resultType string). - :type proxy_user: any - :param spark_config: Spark configuration property. - :type spark_config: dict[str, any] + :vartype proxy_user: any + :ivar spark_config: Spark configuration property. + :vartype spark_config: dict[str, any] """ _validation = { @@ -18267,6 +25441,45 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. + Type: string (or Expression with resultType string). + :paramtype root_path: any + :keyword entry_file_path: Required. The relative path to the root folder of the code/package to + be executed. Type: string (or Expression with resultType string). + :paramtype entry_file_path: any + :keyword arguments: The user-specified arguments to HDInsightSparkActivity. + :paramtype arguments: list[any] + :keyword get_debug_info: Debug info option. Possible values include: "None", "Always", + "Failure". + :paramtype get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :keyword spark_job_linked_service: The storage linked service for uploading the entry file and + dependencies, and for receiving logs. + :paramtype spark_job_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword class_name: The application's Java/Spark main class. + :paramtype class_name: str + :keyword proxy_user: The user to impersonate that will execute the job. Type: string (or + Expression with resultType string). + :paramtype proxy_user: any + :keyword spark_config: Spark configuration property. 
+ :paramtype spark_config: dict[str, any] + """ super(HDInsightSparkActivity, self).__init__(**kwargs) self.type = 'HDInsightSpark' # type: str self.root_path = kwargs['root_path'] @@ -18284,49 +25497,49 @@ class HDInsightStreamingActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[any] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param mapper: Required. Mapper executable name. Type: string (or Expression with resultType + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. 
Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar storage_linked_services: Storage linked service references. + :vartype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :ivar arguments: User specified arguments to HDInsightActivity. + :vartype arguments: list[any] + :ivar get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :vartype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :ivar mapper: Required. Mapper executable name. Type: string (or Expression with resultType string). - :type mapper: any - :param reducer: Required. Reducer executable name. Type: string (or Expression with resultType + :vartype mapper: any + :ivar reducer: Required. Reducer executable name. Type: string (or Expression with resultType string). - :type reducer: any - :param input: Required. Input blob path. Type: string (or Expression with resultType string). - :type input: any - :param output: Required. Output blob path. Type: string (or Expression with resultType string). - :type output: any - :param file_paths: Required. Paths to streaming job files. Can be directories. - :type file_paths: list[any] - :param file_linked_service: Linked service reference where the files are located. 
- :type file_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param combiner: Combiner executable name. Type: string (or Expression with resultType string). - :type combiner: any - :param command_environment: Command line environment values. - :type command_environment: list[any] - :param defines: Allows user to specify defines for streaming job request. - :type defines: dict[str, any] + :vartype reducer: any + :ivar input: Required. Input blob path. Type: string (or Expression with resultType string). + :vartype input: any + :ivar output: Required. Output blob path. Type: string (or Expression with resultType string). + :vartype output: any + :ivar file_paths: Required. Paths to streaming job files. Can be directories. + :vartype file_paths: list[any] + :ivar file_linked_service: Linked service reference where the files are located. + :vartype file_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar combiner: Combiner executable name. Type: string (or Expression with resultType string). + :vartype combiner: any + :ivar command_environment: Command line environment values. + :vartype command_environment: list[any] + :ivar defines: Allows user to specify defines for streaming job request. + :vartype defines: dict[str, any] """ _validation = { @@ -18366,6 +25579,53 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. 
+ :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword storage_linked_services: Storage linked service references. + :paramtype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :keyword arguments: User specified arguments to HDInsightActivity. + :paramtype arguments: list[any] + :keyword get_debug_info: Debug info option. Possible values include: "None", "Always", + "Failure". + :paramtype get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :keyword mapper: Required. Mapper executable name. Type: string (or Expression with resultType + string). + :paramtype mapper: any + :keyword reducer: Required. Reducer executable name. Type: string (or Expression with + resultType string). + :paramtype reducer: any + :keyword input: Required. Input blob path. Type: string (or Expression with resultType string). + :paramtype input: any + :keyword output: Required. Output blob path. Type: string (or Expression with resultType + string). + :paramtype output: any + :keyword file_paths: Required. Paths to streaming job files. Can be directories. + :paramtype file_paths: list[any] + :keyword file_linked_service: Linked service reference where the files are located. + :paramtype file_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword combiner: Combiner executable name. Type: string (or Expression with resultType + string). + :paramtype combiner: any + :keyword command_environment: Command line environment values. + :paramtype command_environment: list[any] + :keyword defines: Allows user to specify defines for streaming job request. 
+ :paramtype defines: dict[str, any] + """ super(HDInsightStreamingActivity, self).__init__(**kwargs) self.type = 'HDInsightStreaming' # type: str self.storage_linked_services = kwargs.get('storage_linked_services', None) @@ -18387,70 +25647,70 @@ class HiveLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. IP address or host name of the Hive server, separated by ';' for - multiple hosts (only when serviceDiscoveryMode is enable). - :type host: any - :param port: The TCP port that the Hive server uses to listen for client connections. - :type port: any - :param server_type: The type of Hive server. Possible values include: "HiveServer1", + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. IP address or host name of the Hive server, separated by ';' for multiple + hosts (only when serviceDiscoveryMode is enable). + :vartype host: any + :ivar port: The TCP port that the Hive server uses to listen for client connections. + :vartype port: any + :ivar server_type: The type of Hive server. Possible values include: "HiveServer1", "HiveServer2", "HiveThriftServer". - :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType - :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible + :vartype server_type: str or ~azure.mgmt.datafactory.models.HiveServerType + :ivar thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible values include: "Binary", "SASL", "HTTP ". - :type thrift_transport_protocol: str or + :vartype thrift_transport_protocol: str or ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol - :param authentication_type: Required. The authentication method used to access the Hive server. + :ivar authentication_type: Required. The authentication method used to access the Hive server. Possible values include: "Anonymous", "Username", "UsernameAndPassword", "WindowsAzureHDInsightService". - :type authentication_type: str or ~azure.mgmt.datafactory.models.HiveAuthenticationType - :param service_discovery_mode: true to indicate using the ZooKeeper service, false not. - :type service_discovery_mode: any - :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.HiveAuthenticationType + :ivar service_discovery_mode: true to indicate using the ZooKeeper service, false not. 
+ :vartype service_discovery_mode: any
+ :ivar zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are
added.
- :type zoo_keeper_name_space: any
- :param use_native_query: Specifies whether the driver uses native HiveQL queries,or converts
+ :vartype zoo_keeper_name_space: any
+ :ivar use_native_query: Specifies whether the driver uses native HiveQL queries, or converts
them into an equivalent form in HiveQL.
- :type use_native_query: any
- :param username: The user name that you use to access Hive Server.
- :type username: any
- :param password: The password corresponding to the user name that you provided in the Username
+ :vartype use_native_query: any
+ :ivar username: The user name that you use to access Hive Server.
+ :vartype username: any
+ :ivar password: The password corresponding to the user name that you provided in the Username
field.
- :type password: ~azure.mgmt.datafactory.models.SecretBase
- :param http_path: The partial URL corresponding to the Hive server.
- :type http_path: any
- :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The
+ :vartype password: ~azure.mgmt.datafactory.models.SecretBase
+ :ivar http_path: The partial URL corresponding to the Hive server.
+ :vartype http_path: any
+ :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The
default value is false.
- :type enable_ssl: any
- :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for
+ :vartype enable_ssl: any
+ :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for
verifying the server when connecting over SSL. This property can only be set when using SSL on
self-hosted IR. The default value is the cacerts.pem file installed with the IR. 
- :type trusted_cert_path: any - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + :vartype trusted_cert_path: any + :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: any - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + :vartype use_system_trust_store: any + :ivar allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: any - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + :vartype allow_host_name_cn_mismatch: any + :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype allow_self_signed_server_cert: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -18489,6 +25749,70 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :keyword annotations: List of tags that can be used for describing the linked service.
+ :paramtype annotations: list[any]
+ :keyword host: Required. IP address or host name of the Hive server, separated by ';' for
+ multiple hosts (only when serviceDiscoveryMode is enable).
+ :paramtype host: any
+ :keyword port: The TCP port that the Hive server uses to listen for client connections.
+ :paramtype port: any
+ :keyword server_type: The type of Hive server. Possible values include: "HiveServer1",
+ "HiveServer2", "HiveThriftServer".
+ :paramtype server_type: str or ~azure.mgmt.datafactory.models.HiveServerType
+ :keyword thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible
+ values include: "Binary", "SASL", "HTTP ".
+ :paramtype thrift_transport_protocol: str or
+ ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol
+ :keyword authentication_type: Required. The authentication method used to access the Hive
+ server. Possible values include: "Anonymous", "Username", "UsernameAndPassword",
+ "WindowsAzureHDInsightService".
+ :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.HiveAuthenticationType
+ :keyword service_discovery_mode: true to indicate using the ZooKeeper service, false not.
+ :paramtype service_discovery_mode: any
+ :keyword zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are
+ added.
+ :paramtype zoo_keeper_name_space: any
+ :keyword use_native_query: Specifies whether the driver uses native HiveQL queries, or converts
+ them into an equivalent form in HiveQL.
+ :paramtype use_native_query: any
+ :keyword username: The user name that you use to access Hive Server.
+ :paramtype username: any
+ :keyword password: The password corresponding to the user name that you provided in the
+ Username field. 
+ :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword http_path: The partial URL corresponding to the Hive server. + :paramtype http_path: any + :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. + The default value is false. + :paramtype enable_ssl: any + :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates + for verifying the server when connecting over SSL. This property can only be set when using SSL + on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :paramtype trusted_cert_path: any + :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system + trust store or from a specified PEM file. The default value is false. + :paramtype use_system_trust_store: any + :keyword allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :paramtype allow_host_name_cn_mismatch: any + :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates + from the server. The default value is false. + :paramtype allow_self_signed_server_cert: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(HiveLinkedService, self).__init__(**kwargs) self.type = 'Hive' # type: str self.host = kwargs['host'] @@ -18515,36 +25839,36 @@ class HiveObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The table name of the Hive. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The schema name of the Hive. Type: string (or Expression + :vartype table_name: any + :ivar table: The table name of the Hive. Type: string (or Expression with resultType string). + :vartype table: any + :ivar schema_type_properties_schema: The schema name of the Hive. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -18571,6 +25895,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. 
+ :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of the Hive. Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of the Hive. Type: string (or + Expression with resultType string). + :paramtype schema_type_properties_schema: any + """ super(HiveObjectDataset, self).__init__(**kwargs) self.type = 'HiveObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -18583,32 +25938,32 @@ class HiveSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -18631,6 +25986,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(HiveSource, self).__init__(**kwargs) self.type = 'HiveSource' # type: str self.query = kwargs.get('query', None) @@ -18641,47 +26022,47 @@ class HttpDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param relative_url: The relative URL based on the URL in the HttpLinkedService refers to an + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar relative_url: The relative URL based on the URL in the HttpLinkedService refers to an HTTP file Type: string (or Expression with resultType string). - :type relative_url: any - :param request_method: The HTTP method for the HTTP request. Type: string (or Expression with + :vartype relative_url: any + :ivar request_method: The HTTP method for the HTTP request. Type: string (or Expression with resultType string). - :type request_method: any - :param request_body: The body for the HTTP request. Type: string (or Expression with resultType + :vartype request_method: any + :ivar request_body: The body for the HTTP request. Type: string (or Expression with resultType string). - :type request_body: any - :param additional_headers: The headers for the HTTP Request. e.g. + :vartype request_body: any + :ivar additional_headers: The headers for the HTTP Request. e.g. request-header-name-1:request-header-value-1 ... request-header-name-n:request-header-value-n Type: string (or Expression with resultType string). - :type additional_headers: any - :param format: The format of files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used on files. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + :vartype additional_headers: any + :ivar format: The format of files. + :vartype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :ivar compression: The data compression method used on files. + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -18711,6 +26092,47 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword relative_url: The relative URL based on the URL in the HttpLinkedService refers to an + HTTP file Type: string (or Expression with resultType string). + :paramtype relative_url: any + :keyword request_method: The HTTP method for the HTTP request. Type: string (or Expression with + resultType string). + :paramtype request_method: any + :keyword request_body: The body for the HTTP request. Type: string (or Expression with + resultType string). + :paramtype request_body: any + :keyword additional_headers: The headers for the HTTP Request. e.g. + request-header-name-1:request-header-value-1 + ... + request-header-name-n:request-header-value-n Type: string (or Expression with resultType + string). + :paramtype additional_headers: any + :keyword format: The format of files. 
+ :paramtype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :keyword compression: The data compression method used on files. + :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ super(HttpDataset, self).__init__(**kwargs) self.type = 'HttpFile' # type: str self.relative_url = kwargs.get('relative_url', None) @@ -18726,51 +26148,51 @@ class HttpLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. 
+ :vartype annotations: list[any] + :ivar url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: string (or Expression with resultType string). - :type url: any - :param authentication_type: The authentication type to be used to connect to the HTTP server. + :vartype url: any + :ivar authentication_type: The authentication type to be used to connect to the HTTP server. Possible values include: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". - :type authentication_type: str or ~azure.mgmt.datafactory.models.HttpAuthenticationType - :param user_name: User name for Basic, Digest, or Windows authentication. Type: string (or + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.HttpAuthenticationType + :ivar user_name: User name for Basic, Digest, or Windows authentication. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password for Basic, Digest, Windows, or ClientCertificate with - EmbeddedCertData authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param auth_headers: The additional HTTP headers in the request to RESTful API used for + :vartype user_name: any + :ivar password: Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData + authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). - :type auth_headers: any - :param embedded_cert_data: Base64 encoded certificate data for ClientCertificate - authentication. For on-premises copy with ClientCertificate authentication, either - CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type embedded_cert_data: any - :param cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. 
Only + :vartype auth_headers: any + :ivar embedded_cert_data: Base64 encoded certificate data for ClientCertificate authentication. + For on-premises copy with ClientCertificate authentication, either CertThumbprint or + EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType + string). + :vartype embedded_cert_data: any + :ivar cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). - :type cert_thumbprint: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype cert_thumbprint: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param enable_server_certificate_validation: If true, validate the HTTPS server SSL - certificate. Default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: any + :vartype encrypted_credential: any + :ivar enable_server_certificate_validation: If true, validate the HTTPS server SSL certificate. + Default value is true. Type: boolean (or Expression with resultType boolean). + :vartype enable_server_certificate_validation: any """ _validation = { @@ -18800,6 +26222,51 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: + string (or Expression with resultType string). + :paramtype url: any + :keyword authentication_type: The authentication type to be used to connect to the HTTP server. + Possible values include: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.HttpAuthenticationType + :keyword user_name: User name for Basic, Digest, or Windows authentication. Type: string (or + Expression with resultType string). + :paramtype user_name: any + :keyword password: Password for Basic, Digest, Windows, or ClientCertificate with + EmbeddedCertData authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword auth_headers: The additional HTTP headers in the request to RESTful API used for + authorization. Type: object (or Expression with resultType object). + :paramtype auth_headers: any + :keyword embedded_cert_data: Base64 encoded certificate data for ClientCertificate + authentication. For on-premises copy with ClientCertificate authentication, either + CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :paramtype embedded_cert_data: any + :keyword cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only + valid for on-premises copy. 
For on-premises copy with ClientCertificate authentication, either + CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :paramtype cert_thumbprint: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword enable_server_certificate_validation: If true, validate the HTTPS server SSL + certificate. Default value is true. Type: boolean (or Expression with resultType boolean). + :paramtype enable_server_certificate_validation: any + """ super(HttpLinkedService, self).__init__(**kwargs) self.type = 'HttpServer' # type: str self.url = kwargs['url'] @@ -18818,34 +26285,34 @@ class HttpReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: + :vartype disable_metrics_collection: any + :ivar request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). - :type request_method: any - :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + :vartype request_method: any + :ivar request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). - :type request_body: any - :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + :vartype request_body: any + :ivar additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: any - :param request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP + :vartype additional_headers: any + :ivar request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. - :type request_timeout: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype request_timeout: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
- :type partition_root_path: any + :vartype partition_root_path: any """ _validation = { @@ -18869,6 +26336,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword request_method: The HTTP method used to call the RESTful API. The default is GET. + Type: string (or Expression with resultType string). + :paramtype request_method: any + :keyword request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + string (or Expression with resultType string). + :paramtype request_body: any + :keyword additional_headers: The additional HTTP headers in the request to the RESTful API. + Type: string (or Expression with resultType string). + :paramtype additional_headers: any + :keyword request_timeout: Specifies the timeout for a HTTP client to get HTTP response from + HTTP server. + :paramtype request_timeout: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). 
+ :paramtype partition_root_path: any + """ super(HttpReadSettings, self).__init__(**kwargs) self.type = 'HttpReadSettings' # type: str self.request_method = kwargs.get('request_method', None) @@ -18884,20 +26379,20 @@ class HttpServerLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any - :param relative_url: Specify the relativeUrl of http server. Type: string (or Expression with + :vartype file_name: any + :ivar relative_url: Specify the relativeUrl of http server. Type: string (or Expression with resultType string). - :type relative_url: any + :vartype relative_url: any """ _validation = { @@ -18916,6 +26411,20 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). 
+ :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + :keyword relative_url: Specify the relativeUrl of http server. Type: string (or Expression with + resultType string). + :paramtype relative_url: any + """ super(HttpServerLocation, self).__init__(**kwargs) self.type = 'HttpServerLocation' # type: str self.relative_url = kwargs.get('relative_url', None) @@ -18926,28 +26435,28 @@ class HttpSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from + :vartype disable_metrics_collection: any + :ivar http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: any + :vartype http_request_timeout: any """ _validation = { @@ -18968,6 +26477,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype disable_metrics_collection: any + :keyword http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response + from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: + string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype http_request_timeout: any + """ super(HttpSource, self).__init__(**kwargs) self.type = 'HttpSource' # type: str self.http_request_timeout = kwargs.get('http_request_timeout', None) @@ -18978,43 +26509,43 @@ class HubspotLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param client_id: Required. The client ID associated with your Hubspot application. - :type client_id: any - :param client_secret: The client secret associated with your Hubspot application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param access_token: The access token obtained when initially authenticating your OAuth + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. 
+ :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar client_id: Required. The client ID associated with your Hubspot application. + :vartype client_id: any + :ivar client_secret: The client secret associated with your Hubspot application. + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar access_token: The access token obtained when initially authenticating your OAuth integration. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param refresh_token: The refresh token obtained when initially authenticating your OAuth + :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar refresh_token: The refresh token obtained when initially authenticating your OAuth integration. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
- :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -19043,6 +26574,43 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword client_id: Required. The client ID associated with your Hubspot application. + :paramtype client_id: any + :keyword client_secret: The client secret associated with your Hubspot application. + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword access_token: The access token obtained when initially authenticating your OAuth + integration. 
+ :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword refresh_token: The refresh token obtained when initially authenticating your OAuth + integration. + :paramtype refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(HubspotLinkedService, self).__init__(**kwargs) self.type = 'Hubspot' # type: str self.client_id = kwargs['client_id'] @@ -19060,30 +26628,30 @@ class HubspotObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. 
+ :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). 
+ :vartype table_name: any """ _validation = { @@ -19108,6 +26676,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(HubspotObjectDataset, self).__init__(**kwargs) self.type = 'HubspotObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -19118,32 +26710,32 @@ class HubspotSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. 
Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -19166,6 +26758,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). 
+ :paramtype query: any + """ super(HubspotSource, self).__init__(**kwargs) self.type = 'HubspotSource' # type: str self.query = kwargs.get('query', None) @@ -19176,29 +26794,29 @@ class IfConditionActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param expression: Required. An expression that would evaluate to Boolean. This is used to + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar expression: Required. An expression that would evaluate to Boolean. This is used to determine the block of activities (ifTrueActivities or ifFalseActivities) that will be executed. - :type expression: ~azure.mgmt.datafactory.models.Expression - :param if_true_activities: List of activities to execute if expression is evaluated to true. 
+ :vartype expression: ~azure.mgmt.datafactory.models.Expression + :ivar if_true_activities: List of activities to execute if expression is evaluated to true. This is an optional property and if not provided, the activity will exit without any action. - :type if_true_activities: list[~azure.mgmt.datafactory.models.Activity] - :param if_false_activities: List of activities to execute if expression is evaluated to false. + :vartype if_true_activities: list[~azure.mgmt.datafactory.models.Activity] + :ivar if_false_activities: List of activities to execute if expression is evaluated to false. This is an optional property and if not provided, the activity will exit without any action. - :type if_false_activities: list[~azure.mgmt.datafactory.models.Activity] + :vartype if_false_activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { @@ -19223,6 +26841,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword expression: Required. An expression that would evaluate to Boolean. This is used to + determine the block of activities (ifTrueActivities or ifFalseActivities) that will be + executed. + :paramtype expression: ~azure.mgmt.datafactory.models.Expression + :keyword if_true_activities: List of activities to execute if expression is evaluated to true. + This is an optional property and if not provided, the activity will exit without any action. 
+ :paramtype if_true_activities: list[~azure.mgmt.datafactory.models.Activity] + :keyword if_false_activities: List of activities to execute if expression is evaluated to + false. This is an optional property and if not provided, the activity will exit without any + action. + :paramtype if_false_activities: list[~azure.mgmt.datafactory.models.Activity] + """ super(IfConditionActivity, self).__init__(**kwargs) self.type = 'IfCondition' # type: str self.expression = kwargs['expression'] @@ -19235,53 +26877,52 @@ class ImpalaLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The IP address or host name of the Impala server. (i.e. - 192.168.222.160). - :type host: any - :param port: The TCP port that the Impala server uses to listen for client connections. The + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. 
+ :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The IP address or host name of the Impala server. (i.e. 192.168.222.160). + :vartype host: any + :ivar port: The TCP port that the Impala server uses to listen for client connections. The default value is 21050. - :type port: any - :param authentication_type: Required. The authentication type to use. Possible values include: + :vartype port: any + :ivar authentication_type: Required. The authentication type to use. Possible values include: "Anonymous", "SASLUsername", "UsernameAndPassword". - :type authentication_type: str or ~azure.mgmt.datafactory.models.ImpalaAuthenticationType - :param username: The user name used to access the Impala server. The default value is anonymous + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.ImpalaAuthenticationType + :ivar username: The user name used to access the Impala server. The default value is anonymous when using SASLUsername. - :type username: any - :param password: The password corresponding to the user name when using UsernameAndPassword. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + :vartype username: any + :ivar password: The password corresponding to the user name when using UsernameAndPassword. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. 
- :type enable_ssl: any - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + :vartype enable_ssl: any + :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: any - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + :vartype trusted_cert_path: any + :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: any - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + :vartype use_system_trust_store: any + :ivar allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: any - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + :vartype allow_host_name_cn_mismatch: any + :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype allow_self_signed_server_cert: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -19314,6 +26955,53 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The IP address or host name of the Impala server. (i.e. + 192.168.222.160). + :paramtype host: any + :keyword port: The TCP port that the Impala server uses to listen for client connections. The + default value is 21050. + :paramtype port: any + :keyword authentication_type: Required. The authentication type to use. Possible values + include: "Anonymous", "SASLUsername", "UsernameAndPassword". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.ImpalaAuthenticationType + :keyword username: The user name used to access the Impala server. The default value is + anonymous when using SASLUsername. + :paramtype username: any + :keyword password: The password corresponding to the user name when using UsernameAndPassword. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. + The default value is false. + :paramtype enable_ssl: any + :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates + for verifying the server when connecting over SSL. 
This property can only be set when using SSL + on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :paramtype trusted_cert_path: any + :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system + trust store or from a specified PEM file. The default value is false. + :paramtype use_system_trust_store: any + :keyword allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :paramtype allow_host_name_cn_mismatch: any + :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates + from the server. The default value is false. + :paramtype allow_self_signed_server_cert: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(ImpalaLinkedService, self).__init__(**kwargs) self.type = 'Impala' # type: str self.host = kwargs['host'] @@ -19334,37 +27022,36 @@ class ImpalaObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. 
+ :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The table name of the Impala. Type: string (or Expression with resultType - string). 
- :type table: any - :param schema_type_properties_schema: The schema name of the Impala. Type: string (or - Expression with resultType string). - :type schema_type_properties_schema: any + :vartype table_name: any + :ivar table: The table name of the Impala. Type: string (or Expression with resultType string). + :vartype table: any + :ivar schema_type_properties_schema: The schema name of the Impala. Type: string (or Expression + with resultType string). + :vartype schema_type_properties_schema: any """ _validation = { @@ -19391,6 +27078,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of the Impala. 
Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of the Impala. Type: string (or + Expression with resultType string). + :paramtype schema_type_properties_schema: any + """ super(ImpalaObjectDataset, self).__init__(**kwargs) self.type = 'ImpalaObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -19403,32 +27121,32 @@ class ImpalaSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -19451,6 +27169,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(ImpalaSource, self).__init__(**kwargs) self.type = 'ImpalaSource' # type: str self.query = kwargs.get('query', None) @@ -19461,39 +27205,39 @@ class InformixLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. 
The non-access credential portion of the connection string + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param authentication_type: Type of authentication used to connect to the Informix as ODBC data + :vartype connection_string: any + :ivar authentication_type: Type of authentication used to connect to the Informix as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :type authentication_type: any - :param credential: The access credential portion of the connection string specified in + :vartype authentication_type: any + :ivar credential: The access credential portion of the connection string specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with + :vartype credential: ~azure.mgmt.datafactory.models.SecretBase + :ivar user_name: User name for Basic authentication. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password for Basic authentication. 
- :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype user_name: any + :ivar password: Password for Basic authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -19520,6 +27264,39 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The non-access credential portion of the connection + string as well as an optional encrypted credential. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword authentication_type: Type of authentication used to connect to the Informix as ODBC + data store. Possible values are: Anonymous and Basic. Type: string (or Expression with + resultType string). + :paramtype authentication_type: any + :keyword credential: The access credential portion of the connection string specified in + driver-specific property-value format. 
+ :paramtype credential: ~azure.mgmt.datafactory.models.SecretBase + :keyword user_name: User name for Basic authentication. Type: string (or Expression with + resultType string). + :paramtype user_name: any + :keyword password: Password for Basic authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(InformixLinkedService, self).__init__(**kwargs) self.type = 'Informix' # type: str self.connection_string = kwargs['connection_string'] @@ -19535,32 +27312,32 @@ class InformixSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param pre_copy_script: A query to execute before starting the copy. Type: string (or - Expression with resultType string). - :type pre_copy_script: any + :vartype disable_metrics_collection: any + :ivar pre_copy_script: A query to execute before starting the copy. Type: string (or Expression + with resultType string). + :vartype pre_copy_script: any """ _validation = { @@ -19583,6 +27360,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). + :paramtype pre_copy_script: any + """ super(InformixSink, self).__init__(**kwargs) self.type = 'InformixSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) @@ -19593,31 +27396,31 @@ class InformixSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. 
Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any + :vartype additional_columns: any + :ivar query: Database query. Type: string (or Expression with resultType string). 
+ :vartype query: any """ _validation = { @@ -19640,6 +27443,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: any + """ super(InformixSource, self).__init__(**kwargs) self.type = 'InformixSource' # type: str self.query = kwargs.get('query', None) @@ -19650,31 +27478,30 @@ class InformixTableDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The Informix table name. Type: string (or Expression with resultType - string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The Informix table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -19699,6 +27526,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The Informix table name. Type: string (or Expression with resultType + string). + :paramtype table_name: any + """ super(InformixTableDataset, self).__init__(**kwargs) self.type = 'InformixTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -19712,14 +27564,14 @@ class IntegrationRuntime(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of integration runtime.Constant filled by server. Possible values + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType - :param description: Integration runtime description. - :type description: str + :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType + :ivar description: Integration runtime description. + :vartype description: str """ _validation = { @@ -19740,6 +27592,13 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Integration runtime description. + :paramtype description: str + """ super(IntegrationRuntime, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = 'IntegrationRuntime' # type: str @@ -19749,10 +27608,10 @@ def __init__( class IntegrationRuntimeAuthKeys(msrest.serialization.Model): """The integration runtime authentication keys. 
- :param auth_key1: The primary integration runtime authentication key. - :type auth_key1: str - :param auth_key2: The secondary integration runtime authentication key. - :type auth_key2: str + :ivar auth_key1: The primary integration runtime authentication key. + :vartype auth_key1: str + :ivar auth_key2: The secondary integration runtime authentication key. + :vartype auth_key2: str """ _attribute_map = { @@ -19764,6 +27623,12 @@ def __init__( self, **kwargs ): + """ + :keyword auth_key1: The primary integration runtime authentication key. + :paramtype auth_key1: str + :keyword auth_key2: The secondary integration runtime authentication key. + :paramtype auth_key2: str + """ super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs) self.auth_key1 = kwargs.get('auth_key1', None) self.auth_key2 = kwargs.get('auth_key2', None) @@ -19772,24 +27637,25 @@ def __init__( class IntegrationRuntimeComputeProperties(msrest.serialization.Model): """The compute resource properties for managed integration runtime. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param location: The location for managed integration runtime. The supported regions could be + :vartype additional_properties: dict[str, any] + :ivar location: The location for managed integration runtime. The supported regions could be found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities. - :type location: str - :param node_size: The node size requirement to managed integration runtime. - :type node_size: str - :param number_of_nodes: The required number of nodes for managed integration runtime. 
- :type number_of_nodes: int - :param max_parallel_executions_per_node: Maximum parallel executions count per node for managed + :vartype location: str + :ivar node_size: The node size requirement to managed integration runtime. + :vartype node_size: str + :ivar number_of_nodes: The required number of nodes for managed integration runtime. + :vartype number_of_nodes: int + :ivar max_parallel_executions_per_node: Maximum parallel executions count per node for managed integration runtime. - :type max_parallel_executions_per_node: int - :param data_flow_properties: Data flow properties for managed integration runtime. - :type data_flow_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeDataFlowProperties - :param v_net_properties: VNet properties for managed integration runtime. - :type v_net_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties + :vartype max_parallel_executions_per_node: int + :ivar data_flow_properties: Data flow properties for managed integration runtime. + :vartype data_flow_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataFlowProperties + :ivar v_net_properties: VNet properties for managed integration runtime. + :vartype v_net_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties """ _validation = { @@ -19811,6 +27677,27 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword location: The location for managed integration runtime. The supported regions could be + found on + https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities. + :paramtype location: str + :keyword node_size: The node size requirement to managed integration runtime. + :paramtype node_size: str + :keyword number_of_nodes: The required number of nodes for managed integration runtime. 
+ :paramtype number_of_nodes: int + :keyword max_parallel_executions_per_node: Maximum parallel executions count per node for + managed integration runtime. + :paramtype max_parallel_executions_per_node: int + :keyword data_flow_properties: Data flow properties for managed integration runtime. + :paramtype data_flow_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataFlowProperties + :keyword v_net_properties: VNet properties for managed integration runtime. + :paramtype v_net_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties + """ super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.location = kwargs.get('location', None) @@ -19826,9 +27713,9 @@ class IntegrationRuntimeConnectionInfo(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar service_token: The token generated in service. Callers use this token to authenticate to integration runtime. :vartype service_token: str @@ -19869,6 +27756,11 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.service_token = None @@ -19882,8 +27774,8 @@ def __init__( class IntegrationRuntimeCustomerVirtualNetwork(msrest.serialization.Model): """The definition and properties of virtual network to which Azure-SSIS integration runtime will join. 
- :param subnet_id: The ID of subnet to which Azure-SSIS integration runtime will join. - :type subnet_id: str + :ivar subnet_id: The ID of subnet to which Azure-SSIS integration runtime will join. + :vartype subnet_id: str """ _attribute_map = { @@ -19894,6 +27786,10 @@ def __init__( self, **kwargs ): + """ + :keyword subnet_id: The ID of subnet to which Azure-SSIS integration runtime will join. + :paramtype subnet_id: str + """ super(IntegrationRuntimeCustomerVirtualNetwork, self).__init__(**kwargs) self.subnet_id = kwargs.get('subnet_id', None) @@ -19901,11 +27797,11 @@ def __init__( class IntegrationRuntimeCustomSetupScriptProperties(msrest.serialization.Model): """Custom setup script properties for a managed dedicated integration runtime. - :param blob_container_uri: The URI of the Azure blob container that contains the custom setup + :ivar blob_container_uri: The URI of the Azure blob container that contains the custom setup script. - :type blob_container_uri: str - :param sas_token: The SAS token of the Azure blob container. - :type sas_token: ~azure.mgmt.datafactory.models.SecureString + :vartype blob_container_uri: str + :ivar sas_token: The SAS token of the Azure blob container. + :vartype sas_token: ~azure.mgmt.datafactory.models.SecureString """ _attribute_map = { @@ -19917,6 +27813,13 @@ def __init__( self, **kwargs ): + """ + :keyword blob_container_uri: The URI of the Azure blob container that contains the custom setup + script. + :paramtype blob_container_uri: str + :keyword sas_token: The SAS token of the Azure blob container. 
+ :paramtype sas_token: ~azure.mgmt.datafactory.models.SecureString + """ super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) self.blob_container_uri = kwargs.get('blob_container_uri', None) self.sas_token = kwargs.get('sas_token', None) @@ -19925,21 +27828,21 @@ def __init__( class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): """Data flow properties for managed integration runtime. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param compute_type: Compute type of the cluster which will execute data flow job. Possible + :vartype additional_properties: dict[str, any] + :ivar compute_type: Compute type of the cluster which will execute data flow job. Possible values include: "General", "MemoryOptimized", "ComputeOptimized". - :type compute_type: str or ~azure.mgmt.datafactory.models.DataFlowComputeType - :param core_count: Core count of the cluster which will execute data flow job. Supported values + :vartype compute_type: str or ~azure.mgmt.datafactory.models.DataFlowComputeType + :ivar core_count: Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. - :type core_count: int - :param time_to_live: Time to live (in minutes) setting of the cluster which will execute data + :vartype core_count: int + :ivar time_to_live: Time to live (in minutes) setting of the cluster which will execute data flow job. - :type time_to_live: int - :param cleanup: Cluster will not be recycled and it will be used in next data flow activity run + :vartype time_to_live: int + :ivar cleanup: Cluster will not be recycled and it will be used in next data flow activity run until TTL (time to live) is reached if this is set as false. Default is true. 
- :type cleanup: bool + :vartype cleanup: bool """ _validation = { @@ -19958,6 +27861,23 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword compute_type: Compute type of the cluster which will execute data flow job. Possible + values include: "General", "MemoryOptimized", "ComputeOptimized". + :paramtype compute_type: str or ~azure.mgmt.datafactory.models.DataFlowComputeType + :keyword core_count: Core count of the cluster which will execute data flow job. Supported + values are: 8, 16, 32, 48, 80, 144 and 272. + :paramtype core_count: int + :keyword time_to_live: Time to live (in minutes) setting of the cluster which will execute data + flow job. + :paramtype time_to_live: int + :keyword cleanup: Cluster will not be recycled and it will be used in next data flow activity + run until TTL (time to live) is reached if this is set as false. Default is true. + :paramtype cleanup: bool + """ super(IntegrationRuntimeDataFlowProperties, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.compute_type = kwargs.get('compute_type', None) @@ -19969,12 +27889,12 @@ def __init__( class IntegrationRuntimeDataProxyProperties(msrest.serialization.Model): """Data proxy properties for a managed dedicated integration runtime. - :param connect_via: The self-hosted integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.EntityReference - :param staging_linked_service: The staging linked service reference. - :type staging_linked_service: ~azure.mgmt.datafactory.models.EntityReference - :param path: The path to contain the staged data in the Blob storage. - :type path: str + :ivar connect_via: The self-hosted integration runtime reference. 
+ :vartype connect_via: ~azure.mgmt.datafactory.models.EntityReference + :ivar staging_linked_service: The staging linked service reference. + :vartype staging_linked_service: ~azure.mgmt.datafactory.models.EntityReference + :ivar path: The path to contain the staged data in the Blob storage. + :vartype path: str """ _attribute_map = { @@ -19987,6 +27907,14 @@ def __init__( self, **kwargs ): + """ + :keyword connect_via: The self-hosted integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.EntityReference + :keyword staging_linked_service: The staging linked service reference. + :paramtype staging_linked_service: ~azure.mgmt.datafactory.models.EntityReference + :keyword path: The path to contain the staged data in the Blob storage. + :paramtype path: str + """ super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) self.connect_via = kwargs.get('connect_via', None) self.staging_linked_service = kwargs.get('staging_linked_service', None) @@ -19998,10 +27926,10 @@ class IntegrationRuntimeDebugResource(SubResourceDebugResource): All required parameters must be populated in order to send to Azure. - :param name: The resource name. - :type name: str - :param properties: Required. Integration runtime properties. - :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime + :ivar name: The resource name. + :vartype name: str + :ivar properties: Required. Integration runtime properties. + :vartype properties: ~azure.mgmt.datafactory.models.IntegrationRuntime """ _validation = { @@ -20017,6 +27945,12 @@ def __init__( self, **kwargs ): + """ + :keyword name: The resource name. + :paramtype name: str + :keyword properties: Required. Integration runtime properties. 
+ :paramtype properties: ~azure.mgmt.datafactory.models.IntegrationRuntime + """ super(IntegrationRuntimeDebugResource, self).__init__(**kwargs) self.properties = kwargs['properties'] @@ -20026,10 +27960,10 @@ class IntegrationRuntimeListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of integration runtimes. - :type value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of integration runtimes. + :vartype value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -20045,6 +27979,12 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. List of integration runtimes. + :paramtype value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(IntegrationRuntimeListResponse, self).__init__(**kwargs) self.value = kwargs['value'] self.next_link = kwargs.get('next_link', None) @@ -20053,10 +27993,10 @@ def __init__( class IntegrationRuntimeMonitoringData(msrest.serialization.Model): """Get monitoring data response. - :param name: Integration runtime name. - :type name: str - :param nodes: Integration runtime node monitoring data. - :type nodes: list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] + :ivar name: Integration runtime name. + :vartype name: str + :ivar nodes: Integration runtime node monitoring data. 
+ :vartype nodes: list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] """ _attribute_map = { @@ -20068,6 +28008,12 @@ def __init__( self, **kwargs ): + """ + :keyword name: Integration runtime name. + :paramtype name: str + :keyword nodes: Integration runtime node monitoring data. + :paramtype nodes: list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] + """ super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.nodes = kwargs.get('nodes', None) @@ -20094,6 +28040,8 @@ def __init__( self, **kwargs ): + """ + """ super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) self.ip_address = None @@ -20103,9 +28051,9 @@ class IntegrationRuntimeNodeMonitoringData(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar node_name: Name of the integration runtime node. :vartype node_name: str :ivar available_memory_in_mb: Available memory (MB) on the integration runtime node. @@ -20152,6 +28100,11 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.node_name = None @@ -20167,10 +28120,10 @@ def __init__( class IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint(msrest.serialization.Model): """Azure-SSIS integration runtime outbound network dependency endpoints for one category. 
- :param category: The category of outbound network dependency. - :type category: str - :param endpoints: The endpoints for outbound network dependency. - :type endpoints: + :ivar category: The category of outbound network dependency. + :vartype category: str + :ivar endpoints: The endpoints for outbound network dependency. + :vartype endpoints: list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint] """ @@ -20183,6 +28136,13 @@ def __init__( self, **kwargs ): + """ + :keyword category: The category of outbound network dependency. + :paramtype category: str + :keyword endpoints: The endpoints for outbound network dependency. + :paramtype endpoints: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint] + """ super(IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint, self).__init__(**kwargs) self.category = kwargs.get('category', None) self.endpoints = kwargs.get('endpoints', None) @@ -20191,10 +28151,10 @@ def __init__( class IntegrationRuntimeOutboundNetworkDependenciesEndpoint(msrest.serialization.Model): """The endpoint for Azure-SSIS integration runtime outbound network dependency. - :param domain_name: The domain name of endpoint. - :type domain_name: str - :param endpoint_details: The details of endpoint. - :type endpoint_details: + :ivar domain_name: The domain name of endpoint. + :vartype domain_name: str + :ivar endpoint_details: The details of endpoint. + :vartype endpoint_details: list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails] """ @@ -20207,6 +28167,13 @@ def __init__( self, **kwargs ): + """ + :keyword domain_name: The domain name of endpoint. + :paramtype domain_name: str + :keyword endpoint_details: The details of endpoint. 
+ :paramtype endpoint_details: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails] + """ super(IntegrationRuntimeOutboundNetworkDependenciesEndpoint, self).__init__(**kwargs) self.domain_name = kwargs.get('domain_name', None) self.endpoint_details = kwargs.get('endpoint_details', None) @@ -20215,8 +28182,8 @@ def __init__( class IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(msrest.serialization.Model): """The details of Azure-SSIS integration runtime outbound network dependency endpoint. - :param port: The port of endpoint. - :type port: int + :ivar port: The port of endpoint. + :vartype port: int """ _attribute_map = { @@ -20227,6 +28194,10 @@ def __init__( self, **kwargs ): + """ + :keyword port: The port of endpoint. + :paramtype port: int + """ super(IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails, self).__init__(**kwargs) self.port = kwargs.get('port', None) @@ -20234,8 +28205,8 @@ def __init__( class IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse(msrest.serialization.Model): """Azure-SSIS integration runtime outbound network dependency endpoints. - :param value: The list of outbound network dependency endpoints. - :type value: + :ivar value: The list of outbound network dependency endpoints. + :vartype value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint] """ @@ -20247,6 +28218,11 @@ def __init__( self, **kwargs ): + """ + :keyword value: The list of outbound network dependency endpoints. + :paramtype value: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint] + """ super(IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, self).__init__(**kwargs) self.value = kwargs.get('value', None) @@ -20260,10 +28236,10 @@ class IntegrationRuntimeReference(msrest.serialization.Model): :ivar type: Type of integration runtime. 
Has constant value: "IntegrationRuntimeReference". :vartype type: str - :param reference_name: Required. Reference integration runtime name. - :type reference_name: str - :param parameters: Arguments for integration runtime. - :type parameters: dict[str, any] + :ivar reference_name: Required. Reference integration runtime name. + :vartype reference_name: str + :ivar parameters: Arguments for integration runtime. + :vartype parameters: dict[str, any] """ _validation = { @@ -20283,6 +28259,12 @@ def __init__( self, **kwargs ): + """ + :keyword reference_name: Required. Reference integration runtime name. + :paramtype reference_name: str + :keyword parameters: Arguments for integration runtime. + :paramtype parameters: dict[str, any] + """ super(IntegrationRuntimeReference, self).__init__(**kwargs) self.reference_name = kwargs['reference_name'] self.parameters = kwargs.get('parameters', None) @@ -20291,9 +28273,9 @@ def __init__( class IntegrationRuntimeRegenerateKeyParameters(msrest.serialization.Model): """Parameters to regenerate the authentication key. - :param key_name: The name of the authentication key to regenerate. Possible values include: + :ivar key_name: The name of the authentication key to regenerate. Possible values include: "authKey1", "authKey2". - :type key_name: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName + :vartype key_name: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName """ _attribute_map = { @@ -20304,6 +28286,11 @@ def __init__( self, **kwargs ): + """ + :keyword key_name: The name of the authentication key to regenerate. Possible values include: + "authKey1", "authKey2". 
+ :paramtype key_name: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName + """ super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) self.key_name = kwargs.get('key_name', None) @@ -20323,8 +28310,8 @@ class IntegrationRuntimeResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Integration runtime properties. - :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime + :ivar properties: Required. Integration runtime properties. + :vartype properties: ~azure.mgmt.datafactory.models.IntegrationRuntime """ _validation = { @@ -20347,6 +28334,10 @@ def __init__( self, **kwargs ): + """ + :keyword properties: Required. Integration runtime properties. + :paramtype properties: ~azure.mgmt.datafactory.models.IntegrationRuntime + """ super(IntegrationRuntimeResource, self).__init__(**kwargs) self.properties = kwargs['properties'] @@ -20354,24 +28345,24 @@ def __init__( class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): """Catalog information for managed dedicated integration runtime. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param catalog_server_endpoint: The catalog database server URL. - :type catalog_server_endpoint: str - :param catalog_admin_user_name: The administrator user name of catalog database. - :type catalog_admin_user_name: str - :param catalog_admin_password: The password of the administrator user account of the catalog + :vartype additional_properties: dict[str, any] + :ivar catalog_server_endpoint: The catalog database server URL. + :vartype catalog_server_endpoint: str + :ivar catalog_admin_user_name: The administrator user name of catalog database. 
+ :vartype catalog_admin_user_name: str + :ivar catalog_admin_password: The password of the administrator user account of the catalog database. - :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString - :param catalog_pricing_tier: The pricing tier for the catalog database. The valid values could + :vartype catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString + :ivar catalog_pricing_tier: The pricing tier for the catalog database. The valid values could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible values include: "Basic", "Standard", "Premium", "PremiumRS". - :type catalog_pricing_tier: str or + :vartype catalog_pricing_tier: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier - :param dual_standby_pair_name: The dual standby pair name of Azure-SSIS Integration Runtimes to + :ivar dual_standby_pair_name: The dual standby pair name of Azure-SSIS Integration Runtimes to support SSISDB failover. - :type dual_standby_pair_name: str + :vartype dual_standby_pair_name: str """ _validation = { @@ -20391,6 +28382,26 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword catalog_server_endpoint: The catalog database server URL. + :paramtype catalog_server_endpoint: str + :keyword catalog_admin_user_name: The administrator user name of catalog database. + :paramtype catalog_admin_user_name: str + :keyword catalog_admin_password: The password of the administrator user account of the catalog + database. + :paramtype catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString + :keyword catalog_pricing_tier: The pricing tier for the catalog database. The valid values + could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/. 
Possible + values include: "Basic", "Standard", "Premium", "PremiumRS". + :paramtype catalog_pricing_tier: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier + :keyword dual_standby_pair_name: The dual standby pair name of Azure-SSIS Integration Runtimes + to support SSISDB failover. + :paramtype dual_standby_pair_name: str + """ super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.catalog_server_endpoint = kwargs.get('catalog_server_endpoint', None) @@ -20403,32 +28414,31 @@ def __init__( class IntegrationRuntimeSsisProperties(msrest.serialization.Model): """SSIS properties for managed integration runtime. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param catalog_info: Catalog information for managed dedicated integration runtime. - :type catalog_info: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo - :param license_type: License type for bringing your own license scenario. Possible values + :vartype additional_properties: dict[str, any] + :ivar catalog_info: Catalog information for managed dedicated integration runtime. + :vartype catalog_info: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo + :ivar license_type: License type for bringing your own license scenario. Possible values include: "BasePrice", "LicenseIncluded". - :type license_type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType - :param custom_setup_script_properties: Custom setup script properties for a managed dedicated + :vartype license_type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType + :ivar custom_setup_script_properties: Custom setup script properties for a managed dedicated integration runtime. 
- :type custom_setup_script_properties: + :vartype custom_setup_script_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties - :param data_proxy_properties: Data proxy properties for a managed dedicated integration - runtime. - :type data_proxy_properties: + :ivar data_proxy_properties: Data proxy properties for a managed dedicated integration runtime. + :vartype data_proxy_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties - :param edition: The edition for the SSIS Integration Runtime. Possible values include: + :ivar edition: The edition for the SSIS Integration Runtime. Possible values include: "Standard", "Enterprise". - :type edition: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition - :param express_custom_setup_properties: Custom setup without script properties for a SSIS + :vartype edition: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition + :ivar express_custom_setup_properties: Custom setup without script properties for a SSIS integration runtime. - :type express_custom_setup_properties: list[~azure.mgmt.datafactory.models.CustomSetupBase] - :param package_stores: Package stores for the SSIS Integration Runtime. - :type package_stores: list[~azure.mgmt.datafactory.models.PackageStore] - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype express_custom_setup_properties: list[~azure.mgmt.datafactory.models.CustomSetupBase] + :ivar package_stores: Package stores for the SSIS Integration Runtime. + :vartype package_stores: list[~azure.mgmt.datafactory.models.PackageStore] + :ivar credential: The credential reference containing authentication information. 
+ :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _attribute_map = { @@ -20447,6 +28457,35 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword catalog_info: Catalog information for managed dedicated integration runtime. + :paramtype catalog_info: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo + :keyword license_type: License type for bringing your own license scenario. Possible values + include: "BasePrice", "LicenseIncluded". + :paramtype license_type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType + :keyword custom_setup_script_properties: Custom setup script properties for a managed dedicated + integration runtime. + :paramtype custom_setup_script_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties + :keyword data_proxy_properties: Data proxy properties for a managed dedicated integration + runtime. + :paramtype data_proxy_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties + :keyword edition: The edition for the SSIS Integration Runtime. Possible values include: + "Standard", "Enterprise". + :paramtype edition: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition + :keyword express_custom_setup_properties: Custom setup without script properties for a SSIS + integration runtime. + :paramtype express_custom_setup_properties: + list[~azure.mgmt.datafactory.models.CustomSetupBase] + :keyword package_stores: Package stores for the SSIS Integration Runtime. + :paramtype package_stores: list[~azure.mgmt.datafactory.models.PackageStore] + :keyword credential: The credential reference containing authentication information. 
+ :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.catalog_info = kwargs.get('catalog_info', None) @@ -20469,12 +28508,12 @@ class IntegrationRuntimeStatus(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of integration runtime.Constant filled by server. Possible values + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType + :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :ivar data_factory_name: The data factory name which the integration runtime belong to. :vartype data_factory_name: str :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped", @@ -20504,6 +28543,11 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(IntegrationRuntimeStatus, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = 'IntegrationRuntimeStatus' # type: str @@ -20516,10 +28560,10 @@ class IntegrationRuntimeStatusListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of integration runtime status. 
- :type value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of integration runtime status. + :vartype value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -20535,6 +28579,12 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. List of integration runtime status. + :paramtype value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs) self.value = kwargs['value'] self.next_link = kwargs.get('next_link', None) @@ -20549,8 +28599,8 @@ class IntegrationRuntimeStatusResponse(msrest.serialization.Model): :ivar name: The integration runtime name. :vartype name: str - :param properties: Required. Integration runtime properties. - :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus + :ivar properties: Required. Integration runtime properties. + :vartype properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus """ _validation = { @@ -20567,6 +28617,10 @@ def __init__( self, **kwargs ): + """ + :keyword properties: Required. Integration runtime properties. + :paramtype properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus + """ super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) self.name = None self.properties = kwargs['properties'] @@ -20575,19 +28629,18 @@ def __init__( class IntegrationRuntimeVNetProperties(msrest.serialization.Model): """VNet properties for managed integration runtime. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param v_net_id: The ID of the VNet that this integration runtime will join. - :type v_net_id: str - :param subnet: The name of the subnet this integration runtime will join. - :type subnet: str - :param public_i_ps: Resource IDs of the public IP addresses that this integration runtime will + :vartype additional_properties: dict[str, any] + :ivar v_net_id: The ID of the VNet that this integration runtime will join. + :vartype v_net_id: str + :ivar subnet: The name of the subnet this integration runtime will join. + :vartype subnet: str + :ivar public_i_ps: Resource IDs of the public IP addresses that this integration runtime will use. - :type public_i_ps: list[str] - :param subnet_id: The ID of subnet, to which this Azure-SSIS integration runtime will be - joined. - :type subnet_id: str + :vartype public_i_ps: list[str] + :ivar subnet_id: The ID of subnet, to which this Azure-SSIS integration runtime will be joined. + :vartype subnet_id: str """ _attribute_map = { @@ -20602,6 +28655,21 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword v_net_id: The ID of the VNet that this integration runtime will join. + :paramtype v_net_id: str + :keyword subnet: The name of the subnet this integration runtime will join. + :paramtype subnet: str + :keyword public_i_ps: Resource IDs of the public IP addresses that this integration runtime + will use. + :paramtype public_i_ps: list[str] + :keyword subnet_id: The ID of subnet, to which this Azure-SSIS integration runtime will be + joined. 
+ :paramtype subnet_id: str + """ super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.v_net_id = kwargs.get('v_net_id', None) @@ -20615,44 +28683,43 @@ class JiraLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The IP address or host name of the Jira service. (e.g. - jira.example.com). - :type host: any - :param port: The TCP port that the Jira server uses to listen for client connections. The + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. 
+ :vartype annotations: list[any] + :ivar host: Required. The IP address or host name of the Jira service. (e.g. jira.example.com). + :vartype host: any + :ivar port: The TCP port that the Jira server uses to listen for client connections. The default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. - :type port: any - :param username: Required. The user name that you use to access Jira Service. - :type username: any - :param password: The password corresponding to the user name that you provided in the username + :vartype port: any + :ivar username: Required. The user name that you use to access Jira Service. + :vartype username: any + :ivar password: The password corresponding to the user name that you provided in the username field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -20682,6 +28749,44 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The IP address or host name of the Jira service. (e.g. + jira.example.com). + :paramtype host: any + :keyword port: The TCP port that the Jira server uses to listen for client connections. The + default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. + :paramtype port: any + :keyword username: Required. The user name that you use to access Jira Service. + :paramtype username: any + :keyword password: The password corresponding to the user name that you provided in the + username field. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. 
+ :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(JiraLinkedService, self).__init__(**kwargs) self.type = 'Jira' # type: str self.host = kwargs['host'] @@ -20699,30 +28804,30 @@ class JiraObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -20747,6 +28852,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. 
Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(JiraObjectDataset, self).__init__(**kwargs) self.type = 'JiraObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -20757,32 +28886,32 @@ class JiraSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). 
- :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: any + :vartype query: any """ _validation = { @@ -20805,6 +28934,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(JiraSource, self).__init__(**kwargs) self.type = 'JiraSource' # type: str self.query = kwargs.get('query', None) @@ -20815,38 +28970,38 @@ class JsonDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the json data storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param encoding_name: The code page name of the preferred encoding. If not specified, the + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar location: The location of the json data storage. + :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation + :ivar encoding_name: The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :type encoding_name: any - :param compression: The data compression method used for the json dataset. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + :vartype encoding_name: any + :ivar compression: The data compression method used for the json dataset. + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -20873,6 +29028,38 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. 
+ :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword location: The location of the json data storage. + :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation + :keyword encoding_name: The code page name of the preferred encoding. If not specified, the + default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column + of the table in the following link to set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). + :paramtype encoding_name: any + :keyword compression: The data compression method used for the json dataset. + :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ super(JsonDataset, self).__init__(**kwargs) self.type = 'Json' # type: str self.location = kwargs.get('location', None) @@ -20885,36 +29072,36 @@ class JsonFormat(DatasetStorageFormat): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage format.Constant filled by server. - :type type: str - :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: any - :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: any - :param file_pattern: File pattern of JSON. To be more specific, the way of separating a + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage format.Constant filled by server. + :vartype type: str + :ivar serializer: Serializer. Type: string (or Expression with resultType string). + :vartype serializer: any + :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). + :vartype deserializer: any + :ivar file_pattern: File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. - :type file_pattern: any - :param nesting_separator: The character used to separate nesting levels. Default value is '.' + :vartype file_pattern: any + :ivar nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). - :type nesting_separator: any - :param encoding_name: The code page name of the preferred encoding. If not provided, the - default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. - The full list of supported values can be found in the 'Name' column of the table of encodings - in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or + :vartype nesting_separator: any + :ivar encoding_name: The code page name of the preferred encoding. If not provided, the default + value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. 
The full + list of supported values can be found in the 'Name' column of the table of encodings in the + following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or Expression with resultType string). - :type encoding_name: any - :param json_node_reference: The JSONPath of the JSON array element to be flattened. Example: + :vartype encoding_name: any + :ivar json_node_reference: The JSONPath of the JSON array element to be flattened. Example: "$.ArrayPath". Type: string (or Expression with resultType string). - :type json_node_reference: any - :param json_path_definition: The JSONPath definition for each column mapping with a customized + :vartype json_node_reference: any + :ivar json_path_definition: The JSONPath definition for each column mapping with a customized column name to extract data from JSON file. For fields under root object, start with "$"; for fields inside the array chosen by jsonNodeReference property, start from the array element. Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object (or Expression with resultType object). - :type json_path_definition: any + :vartype json_path_definition: any """ _validation = { @@ -20937,6 +29124,36 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword serializer: Serializer. Type: string (or Expression with resultType string). + :paramtype serializer: any + :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). + :paramtype deserializer: any + :keyword file_pattern: File pattern of JSON. To be more specific, the way of separating a + collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. + :paramtype file_pattern: any + :keyword nesting_separator: The character used to separate nesting levels. Default value is '.' + (dot). 
Type: string (or Expression with resultType string). + :paramtype nesting_separator: any + :keyword encoding_name: The code page name of the preferred encoding. If not provided, the + default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. + The full list of supported values can be found in the 'Name' column of the table of encodings + in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or + Expression with resultType string). + :paramtype encoding_name: any + :keyword json_node_reference: The JSONPath of the JSON array element to be flattened. Example: + "$.ArrayPath". Type: string (or Expression with resultType string). + :paramtype json_node_reference: any + :keyword json_path_definition: The JSONPath definition for each column mapping with a + customized column name to extract data from JSON file. For fields under root object, start with + "$"; for fields inside the array chosen by jsonNodeReference property, start from the array + element. Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object + (or Expression with resultType object). + :paramtype json_path_definition: any + """ super(JsonFormat, self).__init__(**kwargs) self.type = 'JsonFormat' # type: str self.file_pattern = kwargs.get('file_pattern', None) @@ -20951,13 +29168,13 @@ class JsonReadSettings(FormatReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param compression_properties: Compression settings. 
- :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar compression_properties: Compression settings. + :vartype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ _validation = { @@ -20974,6 +29191,13 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword compression_properties: Compression settings. + :paramtype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + """ super(JsonReadSettings, self).__init__(**kwargs) self.type = 'JsonReadSettings' # type: str self.compression_properties = kwargs.get('compression_properties', None) @@ -20984,33 +29208,33 @@ class JsonSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Json store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: Json format settings. - :type format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings + :vartype disable_metrics_collection: any + :ivar store_settings: Json store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :ivar format_settings: Json format settings. 
+ :vartype format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings """ _validation = { @@ -21034,6 +29258,33 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: Json store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :keyword format_settings: Json format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings + """ super(JsonSink, self).__init__(**kwargs) self.type = 'JsonSink' # type: str self.store_settings = kwargs.get('store_settings', None) @@ -21045,30 +29296,30 @@ class JsonSource(CopySource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Json store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: Json format settings. 
- :type format_settings: ~azure.mgmt.datafactory.models.JsonReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar store_settings: Json store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar format_settings: Json format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.JsonReadSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -21091,6 +29342,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: Json store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword format_settings: Json format settings. 
+ :paramtype format_settings: ~azure.mgmt.datafactory.models.JsonReadSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(JsonSource, self).__init__(**kwargs) self.type = 'JsonSource' # type: str self.store_settings = kwargs.get('store_settings', None) @@ -21103,14 +29378,14 @@ class JsonWriteSettings(FormatWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar file_pattern: File pattern of JSON. This setting controls the way a collection of JSON objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. - :type file_pattern: any + :vartype file_pattern: any """ _validation = { @@ -21127,6 +29402,14 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword file_pattern: File pattern of JSON. This setting controls the way a collection of JSON + objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. 
+ :paramtype file_pattern: any + """ super(JsonWriteSettings, self).__init__(**kwargs) self.type = 'JsonWriteSettings' # type: str self.file_pattern = kwargs.get('file_pattern', None) @@ -21171,6 +29454,8 @@ def __init__( self, **kwargs ): + """ + """ super(LinkedIntegrationRuntime, self).__init__(**kwargs) self.name = None self.subscription_id = None @@ -21187,9 +29472,9 @@ class LinkedIntegrationRuntimeType(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param authorization_type: Required. The authorization type for integration runtime + :ivar authorization_type: Required. The authorization type for integration runtime sharing.Constant filled by server. - :type authorization_type: str + :vartype authorization_type: str """ _validation = { @@ -21208,6 +29493,8 @@ def __init__( self, **kwargs ): + """ + """ super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) self.authorization_type = None # type: Optional[str] @@ -21217,11 +29504,11 @@ class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): All required parameters must be populated in order to send to Azure. - :param authorization_type: Required. The authorization type for integration runtime + :ivar authorization_type: Required. The authorization type for integration runtime sharing.Constant filled by server. - :type authorization_type: str - :param key: Required. The key used for authorization. - :type key: ~azure.mgmt.datafactory.models.SecureString + :vartype authorization_type: str + :ivar key: Required. The key used for authorization. + :vartype key: ~azure.mgmt.datafactory.models.SecureString """ _validation = { @@ -21238,6 +29525,10 @@ def __init__( self, **kwargs ): + """ + :keyword key: Required. The key used for authorization. 
+ :paramtype key: ~azure.mgmt.datafactory.models.SecureString + """ super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) self.authorization_type = 'Key' # type: str self.key = kwargs['key'] @@ -21248,11 +29539,11 @@ class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): All required parameters must be populated in order to send to Azure. - :param authorization_type: Required. The authorization type for integration runtime + :ivar authorization_type: Required. The authorization type for integration runtime sharing.Constant filled by server. - :type authorization_type: str - :param resource_id: Required. The resource identifier of the integration runtime to be shared. - :type resource_id: str + :vartype authorization_type: str + :ivar resource_id: Required. The resource identifier of the integration runtime to be shared. + :vartype resource_id: str """ _validation = { @@ -21269,6 +29560,11 @@ def __init__( self, **kwargs ): + """ + :keyword resource_id: Required. The resource identifier of the integration runtime to be + shared. + :paramtype resource_id: str + """ super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) self.authorization_type = 'RBAC' # type: str self.resource_id = kwargs['resource_id'] @@ -21279,8 +29575,8 @@ class LinkedIntegrationRuntimeRequest(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param linked_factory_name: Required. The data factory name for linked integration runtime. - :type linked_factory_name: str + :ivar linked_factory_name: Required. The data factory name for linked integration runtime. + :vartype linked_factory_name: str """ _validation = { @@ -21295,6 +29591,10 @@ def __init__( self, **kwargs ): + """ + :keyword linked_factory_name: Required. The data factory name for linked integration runtime. 
+ :paramtype linked_factory_name: str + """ super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs) self.linked_factory_name = kwargs['linked_factory_name'] @@ -21304,10 +29604,10 @@ class LinkedServiceDebugResource(SubResourceDebugResource): All required parameters must be populated in order to send to Azure. - :param name: The resource name. - :type name: str - :param properties: Required. Properties of linked service. - :type properties: ~azure.mgmt.datafactory.models.LinkedService + :ivar name: The resource name. + :vartype name: str + :ivar properties: Required. Properties of linked service. + :vartype properties: ~azure.mgmt.datafactory.models.LinkedService """ _validation = { @@ -21323,6 +29623,12 @@ def __init__( self, **kwargs ): + """ + :keyword name: The resource name. + :paramtype name: str + :keyword properties: Required. Properties of linked service. + :paramtype properties: ~azure.mgmt.datafactory.models.LinkedService + """ super(LinkedServiceDebugResource, self).__init__(**kwargs) self.properties = kwargs['properties'] @@ -21332,10 +29638,10 @@ class LinkedServiceListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of linked services. - :type value: list[~azure.mgmt.datafactory.models.LinkedServiceResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of linked services. + :vartype value: list[~azure.mgmt.datafactory.models.LinkedServiceResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -21351,6 +29657,12 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. List of linked services. 
+ :paramtype value: list[~azure.mgmt.datafactory.models.LinkedServiceResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(LinkedServiceListResponse, self).__init__(**kwargs) self.value = kwargs['value'] self.next_link = kwargs.get('next_link', None) @@ -21365,10 +29677,10 @@ class LinkedServiceReference(msrest.serialization.Model): :ivar type: Linked service reference type. Has constant value: "LinkedServiceReference". :vartype type: str - :param reference_name: Required. Reference LinkedService name. - :type reference_name: str - :param parameters: Arguments for LinkedService. - :type parameters: dict[str, any] + :ivar reference_name: Required. Reference LinkedService name. + :vartype reference_name: str + :ivar parameters: Arguments for LinkedService. + :vartype parameters: dict[str, any] """ _validation = { @@ -21388,6 +29700,12 @@ def __init__( self, **kwargs ): + """ + :keyword reference_name: Required. Reference LinkedService name. + :paramtype reference_name: str + :keyword parameters: Arguments for LinkedService. + :paramtype parameters: dict[str, any] + """ super(LinkedServiceReference, self).__init__(**kwargs) self.reference_name = kwargs['reference_name'] self.parameters = kwargs.get('parameters', None) @@ -21408,8 +29726,8 @@ class LinkedServiceResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Properties of linked service. - :type properties: ~azure.mgmt.datafactory.models.LinkedService + :ivar properties: Required. Properties of linked service. + :vartype properties: ~azure.mgmt.datafactory.models.LinkedService """ _validation = { @@ -21432,6 +29750,10 @@ def __init__( self, **kwargs ): + """ + :keyword properties: Required. Properties of linked service. 
+ :paramtype properties: ~azure.mgmt.datafactory.models.LinkedService + """ super(LinkedServiceResource, self).__init__(**kwargs) self.properties = kwargs['properties'] @@ -21441,11 +29763,11 @@ class LogLocationSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param linked_service_name: Required. Log storage linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing detailed logs of activity execution. Type: string + :ivar linked_service_name: Required. Log storage linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). - :type path: any + :vartype path: any """ _validation = { @@ -21461,6 +29783,13 @@ def __init__( self, **kwargs ): + """ + :keyword linked_service_name: Required. Log storage linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword path: The path to storage for storing detailed logs of activity execution. Type: + string (or Expression with resultType string). + :paramtype path: any + """ super(LogLocationSettings, self).__init__(**kwargs) self.linked_service_name = kwargs['linked_service_name'] self.path = kwargs.get('path', None) @@ -21471,14 +29800,14 @@ class LogSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean + :ivar enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean (or Expression with resultType boolean). - :type enable_copy_activity_log: any - :param copy_activity_log_settings: Specifies settings for copy activity log. 
- :type copy_activity_log_settings: ~azure.mgmt.datafactory.models.CopyActivityLogSettings - :param log_location_settings: Required. Log location settings customer needs to provide when + :vartype enable_copy_activity_log: any + :ivar copy_activity_log_settings: Specifies settings for copy activity log. + :vartype copy_activity_log_settings: ~azure.mgmt.datafactory.models.CopyActivityLogSettings + :ivar log_location_settings: Required. Log location settings customer needs to provide when enabling log. - :type log_location_settings: ~azure.mgmt.datafactory.models.LogLocationSettings + :vartype log_location_settings: ~azure.mgmt.datafactory.models.LogLocationSettings """ _validation = { @@ -21495,6 +29824,16 @@ def __init__( self, **kwargs ): + """ + :keyword enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean + (or Expression with resultType boolean). + :paramtype enable_copy_activity_log: any + :keyword copy_activity_log_settings: Specifies settings for copy activity log. + :paramtype copy_activity_log_settings: ~azure.mgmt.datafactory.models.CopyActivityLogSettings + :keyword log_location_settings: Required. Log location settings customer needs to provide when + enabling log. + :paramtype log_location_settings: ~azure.mgmt.datafactory.models.LogLocationSettings + """ super(LogSettings, self).__init__(**kwargs) self.enable_copy_activity_log = kwargs.get('enable_copy_activity_log', None) self.copy_activity_log_settings = kwargs.get('copy_activity_log_settings', None) @@ -21506,20 +29845,20 @@ class LogStorageSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param linked_service_name: Required. Log storage linked service reference. 
- :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing detailed logs of activity execution. Type: string + :vartype additional_properties: dict[str, any] + :ivar linked_service_name: Required. Log storage linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). - :type path: any - :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + :vartype path: any + :ivar log_level: Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). - :type log_level: any - :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or + :vartype log_level: any + :ivar enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). - :type enable_reliable_logging: any + :vartype enable_reliable_logging: any """ _validation = { @@ -21538,6 +29877,22 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword linked_service_name: Required. Log storage linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword path: The path to storage for storing detailed logs of activity execution. Type: + string (or Expression with resultType string). + :paramtype path: any + :keyword log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + Expression with resultType string). + :paramtype log_level: any + :keyword enable_reliable_logging: Specifies whether to enable reliable logging. 
Type: boolean + (or Expression with resultType boolean). + :paramtype enable_reliable_logging: any + """ super(LogStorageSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.linked_service_name = kwargs['linked_service_name'] @@ -21551,30 +29906,30 @@ class LookupActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param source: Required. Dataset-specific source properties, same as copy activity source. - :type source: ~azure.mgmt.datafactory.models.CopySource - :param dataset: Required. Lookup activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param first_row_only: Whether to return first row or all rows. Default value is true. Type: + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. 
+ :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar source: Required. Dataset-specific source properties, same as copy activity source. + :vartype source: ~azure.mgmt.datafactory.models.CopySource + :ivar dataset: Required. Lookup activity dataset reference. + :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :ivar first_row_only: Whether to return first row or all rows. Default value is true. Type: boolean (or Expression with resultType boolean). - :type first_row_only: any + :vartype first_row_only: any """ _validation = { @@ -21602,6 +29957,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. 
+ :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword source: Required. Dataset-specific source properties, same as copy activity source. + :paramtype source: ~azure.mgmt.datafactory.models.CopySource + :keyword dataset: Required. Lookup activity dataset reference. + :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :keyword first_row_only: Whether to return first row or all rows. Default value is true. Type: + boolean (or Expression with resultType boolean). + :paramtype first_row_only: any + """ super(LookupActivity, self).__init__(**kwargs) self.type = 'Lookup' # type: str self.source = kwargs['source'] @@ -21614,37 +29993,37 @@ class MagentoLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3). - :type host: any - :param access_token: The access token from Magento. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3). + :vartype host: any + :ivar access_token: The access token from Magento. + :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -21671,6 +30050,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3). + :paramtype host: any + :keyword access_token: The access token from Magento. + :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(MagentoLinkedService, self).__init__(**kwargs) self.type = 'Magento' # type: str self.host = kwargs['host'] @@ -21686,30 +30096,30 @@ class MagentoObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -21734,6 +30144,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. 
+ :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(MagentoObjectDataset, self).__init__(**kwargs) self.type = 'MagentoObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -21744,32 +30178,32 @@ class MagentoSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -21792,6 +30226,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(MagentoSource, self).__init__(**kwargs) self.type = 'MagentoSource' # type: str self.query = kwargs.get('query', None) @@ -21802,17 +30262,17 @@ class ManagedIdentityCredential(Credential): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of credential.Constant filled by server. - :type type: str - :param description: Credential description. - :type description: str - :param annotations: List of tags that can be used for describing the Credential. - :type annotations: list[any] - :param resource_id: The resource id of user assigned managed identity. - :type resource_id: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. 
Type of credential.Constant filled by server. + :vartype type: str + :ivar description: Credential description. + :vartype description: str + :ivar annotations: List of tags that can be used for describing the Credential. + :vartype annotations: list[any] + :ivar resource_id: The resource id of user assigned managed identity. + :vartype resource_id: str """ _validation = { @@ -21831,6 +30291,17 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Credential description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the Credential. + :paramtype annotations: list[any] + :keyword resource_id: The resource id of user assigned managed identity. + :paramtype resource_id: str + """ super(ManagedIdentityCredential, self).__init__(**kwargs) self.type = 'ManagedIdentity' # type: str self.resource_id = kwargs.get('resource_id', None) @@ -21843,27 +30314,27 @@ class ManagedIntegrationRuntime(IntegrationRuntime): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of integration runtime.Constant filled by server. Possible values + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType - :param description: Integration runtime description. 
- :type description: str + :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType + :ivar description: Integration runtime description. + :vartype description: str :ivar state: Integration runtime state, only valid for managed dedicated integration runtime. Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param managed_virtual_network: Managed Virtual Network reference. - :type managed_virtual_network: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkReference - :param compute_properties: The compute resource for managed integration runtime. - :type compute_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties - :param ssis_properties: SSIS properties for managed integration runtime. - :type ssis_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties - :param customer_virtual_network: The name of virtual network to which Azure-SSIS integration + :ivar managed_virtual_network: Managed Virtual Network reference. + :vartype managed_virtual_network: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkReference + :ivar compute_properties: The compute resource for managed integration runtime. + :vartype compute_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties + :ivar ssis_properties: SSIS properties for managed integration runtime. + :vartype ssis_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties + :ivar customer_virtual_network: The name of virtual network to which Azure-SSIS integration runtime will join. 
- :type customer_virtual_network: + :vartype customer_virtual_network: ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomerVirtualNetwork """ @@ -21887,6 +30358,25 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Integration runtime description. + :paramtype description: str + :keyword managed_virtual_network: Managed Virtual Network reference. + :paramtype managed_virtual_network: + ~azure.mgmt.datafactory.models.ManagedVirtualNetworkReference + :keyword compute_properties: The compute resource for managed integration runtime. + :paramtype compute_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties + :keyword ssis_properties: SSIS properties for managed integration runtime. + :paramtype ssis_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties + :keyword customer_virtual_network: The name of virtual network to which Azure-SSIS integration + runtime will join. + :paramtype customer_virtual_network: + ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomerVirtualNetwork + """ super(ManagedIntegrationRuntime, self).__init__(**kwargs) self.type = 'Managed' # type: str self.state = None @@ -21901,9 +30391,9 @@ class ManagedIntegrationRuntimeError(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar time: The time when the error occurred. :vartype time: ~datetime.datetime :ivar code: Error code. 
@@ -21933,6 +30423,11 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.time = None @@ -21946,16 +30441,16 @@ class ManagedIntegrationRuntimeNode(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar node_id: The managed integration runtime node id. :vartype node_id: str :ivar status: The managed integration runtime node status. Possible values include: "Starting", "Available", "Recycling", "Unavailable". :vartype status: str or ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus - :param errors: The errors that occurred on this integration runtime node. - :type errors: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] + :ivar errors: The errors that occurred on this integration runtime node. + :vartype errors: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] """ _validation = { @@ -21974,6 +30469,13 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword errors: The errors that occurred on this integration runtime node. 
+ :paramtype errors: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] + """ super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.node_id = None @@ -21986,9 +30488,9 @@ class ManagedIntegrationRuntimeOperationResult(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar type: The operation type. Could be start or stop. :vartype type: str :ivar start_time: The start time of the operation. @@ -22026,6 +30528,11 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = None @@ -22043,12 +30550,12 @@ class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of integration runtime.Constant filled by server. Possible values + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". 
- :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType + :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :ivar data_factory_name: The data factory name which the integration runtime belong to. :vartype data_factory_name: str :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped", @@ -22091,6 +30598,11 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(ManagedIntegrationRuntimeStatus, self).__init__(**kwargs) self.type = 'Managed' # type: str self.create_time = None @@ -22104,20 +30616,20 @@ class ManagedPrivateEndpoint(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param connection_state: The managed private endpoint connection state. - :type connection_state: ~azure.mgmt.datafactory.models.ConnectionStateProperties - :param fqdns: Fully qualified domain names. - :type fqdns: list[str] - :param group_id: The groupId to which the managed private endpoint is created. - :type group_id: str + :vartype additional_properties: dict[str, any] + :ivar connection_state: The managed private endpoint connection state. + :vartype connection_state: ~azure.mgmt.datafactory.models.ConnectionStateProperties + :ivar fqdns: Fully qualified domain names. + :vartype fqdns: list[str] + :ivar group_id: The groupId to which the managed private endpoint is created. + :vartype group_id: str :ivar is_reserved: Denotes whether the managed private endpoint is reserved. 
:vartype is_reserved: bool - :param private_link_resource_id: The ARM resource ID of the resource to which the managed + :ivar private_link_resource_id: The ARM resource ID of the resource to which the managed private endpoint is created. - :type private_link_resource_id: str + :vartype private_link_resource_id: str :ivar provisioning_state: The managed private endpoint provisioning state. :vartype provisioning_state: str """ @@ -22141,6 +30653,20 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connection_state: The managed private endpoint connection state. + :paramtype connection_state: ~azure.mgmt.datafactory.models.ConnectionStateProperties + :keyword fqdns: Fully qualified domain names. + :paramtype fqdns: list[str] + :keyword group_id: The groupId to which the managed private endpoint is created. + :paramtype group_id: str + :keyword private_link_resource_id: The ARM resource ID of the resource to which the managed + private endpoint is created. + :paramtype private_link_resource_id: str + """ super(ManagedPrivateEndpoint, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.connection_state = kwargs.get('connection_state', None) @@ -22156,10 +30682,10 @@ class ManagedPrivateEndpointListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of managed private endpoints. - :type value: list[~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of managed private endpoints. 
+ :vartype value: list[~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -22175,6 +30701,12 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. List of managed private endpoints. + :paramtype value: list[~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(ManagedPrivateEndpointListResponse, self).__init__(**kwargs) self.value = kwargs['value'] self.next_link = kwargs.get('next_link', None) @@ -22195,8 +30727,8 @@ class ManagedPrivateEndpointResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Managed private endpoint properties. - :type properties: ~azure.mgmt.datafactory.models.ManagedPrivateEndpoint + :ivar properties: Required. Managed private endpoint properties. + :vartype properties: ~azure.mgmt.datafactory.models.ManagedPrivateEndpoint """ _validation = { @@ -22219,6 +30751,10 @@ def __init__( self, **kwargs ): + """ + :keyword properties: Required. Managed private endpoint properties. + :paramtype properties: ~azure.mgmt.datafactory.models.ManagedPrivateEndpoint + """ super(ManagedPrivateEndpointResource, self).__init__(**kwargs) self.properties = kwargs['properties'] @@ -22228,9 +30764,9 @@ class ManagedVirtualNetwork(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar v_net_id: Managed Virtual Network ID. :vartype v_net_id: str :ivar alias: Managed Virtual Network alias. @@ -22252,6 +30788,11 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(ManagedVirtualNetwork, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.v_net_id = None @@ -22263,10 +30804,10 @@ class ManagedVirtualNetworkListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of managed Virtual Networks. - :type value: list[~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of managed Virtual Networks. + :vartype value: list[~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -22282,6 +30823,12 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. List of managed Virtual Networks. + :paramtype value: list[~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(ManagedVirtualNetworkListResponse, self).__init__(**kwargs) self.value = kwargs['value'] self.next_link = kwargs.get('next_link', None) @@ -22297,8 +30844,8 @@ class ManagedVirtualNetworkReference(msrest.serialization.Model): :ivar type: Managed Virtual Network reference type. Has constant value: "ManagedVirtualNetworkReference". 
:vartype type: str - :param reference_name: Required. Reference ManagedVirtualNetwork name. - :type reference_name: str + :ivar reference_name: Required. Reference ManagedVirtualNetwork name. + :vartype reference_name: str """ _validation = { @@ -22317,6 +30864,10 @@ def __init__( self, **kwargs ): + """ + :keyword reference_name: Required. Reference ManagedVirtualNetwork name. + :paramtype reference_name: str + """ super(ManagedVirtualNetworkReference, self).__init__(**kwargs) self.reference_name = kwargs['reference_name'] @@ -22336,8 +30887,8 @@ class ManagedVirtualNetworkResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Managed Virtual Network properties. - :type properties: ~azure.mgmt.datafactory.models.ManagedVirtualNetwork + :ivar properties: Required. Managed Virtual Network properties. + :vartype properties: ~azure.mgmt.datafactory.models.ManagedVirtualNetwork """ _validation = { @@ -22360,6 +30911,10 @@ def __init__( self, **kwargs ): + """ + :keyword properties: Required. Managed Virtual Network properties. + :paramtype properties: ~azure.mgmt.datafactory.models.ManagedVirtualNetwork + """ super(ManagedVirtualNetworkResource, self).__init__(**kwargs) self.properties = kwargs['properties'] @@ -22369,25 +30924,25 @@ class MappingDataFlow(DataFlow): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of data flow.Constant filled by server. - :type type: str - :param description: The description of the data flow. - :type description: str - :param annotations: List of tags that can be used for describing the data flow. - :type annotations: list[any] - :param folder: The folder that this data flow is in. If not specified, Data flow will appear at + :ivar type: Required. Type of data flow.Constant filled by server. + :vartype type: str + :ivar description: The description of the data flow. 
+ :vartype description: str + :ivar annotations: List of tags that can be used for describing the data flow. + :vartype annotations: list[any] + :ivar folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder - :param sources: List of sources in data flow. - :type sources: list[~azure.mgmt.datafactory.models.DataFlowSource] - :param sinks: List of sinks in data flow. - :type sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] - :param transformations: List of transformations in data flow. - :type transformations: list[~azure.mgmt.datafactory.models.Transformation] - :param script: DataFlow script. - :type script: str - :param script_lines: Data flow script lines. - :type script_lines: list[str] + :vartype folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :ivar sources: List of sources in data flow. + :vartype sources: list[~azure.mgmt.datafactory.models.DataFlowSource] + :ivar sinks: List of sinks in data flow. + :vartype sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] + :ivar transformations: List of transformations in data flow. + :vartype transformations: list[~azure.mgmt.datafactory.models.Transformation] + :ivar script: DataFlow script. + :vartype script: str + :ivar script_lines: Data flow script lines. + :vartype script_lines: list[str] """ _validation = { @@ -22410,6 +30965,25 @@ def __init__( self, **kwargs ): + """ + :keyword description: The description of the data flow. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the data flow. + :paramtype annotations: list[any] + :keyword folder: The folder that this data flow is in. If not specified, Data flow will appear + at the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :keyword sources: List of sources in data flow. 
+ :paramtype sources: list[~azure.mgmt.datafactory.models.DataFlowSource] + :keyword sinks: List of sinks in data flow. + :paramtype sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] + :keyword transformations: List of transformations in data flow. + :paramtype transformations: list[~azure.mgmt.datafactory.models.Transformation] + :keyword script: DataFlow script. + :paramtype script: str + :keyword script_lines: Data flow script lines. + :paramtype script_lines: list[str] + """ super(MappingDataFlow, self).__init__(**kwargs) self.type = 'MappingDataFlow' # type: str self.sources = kwargs.get('sources', None) @@ -22424,28 +30998,28 @@ class MariaDBLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: An ODBC connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. 
+ :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar pwd: The Azure key vault secret reference of password in connection string. + :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -22468,6 +31042,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: An ODBC connection string. 
Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword pwd: The Azure key vault secret reference of password in connection string. + :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(MariaDBLinkedService, self).__init__(**kwargs) self.type = 'MariaDB' # type: str self.connection_string = kwargs.get('connection_string', None) @@ -22480,32 +31076,32 @@ class MariaDBSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -22528,6 +31124,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(MariaDBSource, self).__init__(**kwargs) self.type = 'MariaDBSource' # type: str self.query = kwargs.get('query', None) @@ -22538,30 +31160,30 @@ class MariaDBTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -22586,6 +31208,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(MariaDBTableDataset, self).__init__(**kwargs) self.type = 'MariaDBTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -22596,39 +31242,39 @@ class MarketoLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param endpoint: Required. The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com). - :type endpoint: any - :param client_id: Required. The client Id of your Marketo service. - :type client_id: any - :param client_secret: The client secret of your Marketo service. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar endpoint: Required. The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com). 
+ :vartype endpoint: any + :ivar client_id: Required. The client Id of your Marketo service. + :vartype client_id: any + :ivar client_secret: The client secret of your Marketo service. + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -22657,6 +31303,40 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. 
+ :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword endpoint: Required. The endpoint of the Marketo server. (i.e. + 123-ABC-321.mktorest.com). + :paramtype endpoint: any + :keyword client_id: Required. The client Id of your Marketo service. + :paramtype client_id: any + :keyword client_secret: The client secret of your Marketo service. + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(MarketoLinkedService, self).__init__(**kwargs) self.type = 'Marketo' # type: str self.endpoint = kwargs['endpoint'] @@ -22673,30 +31353,30 @@ class MarketoObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -22721,6 +31401,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). 
+ :paramtype table_name: any + """ super(MarketoObjectDataset, self).__init__(**kwargs) self.type = 'MarketoObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -22731,32 +31435,32 @@ class MarketoSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -22779,6 +31483,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(MarketoSource, self).__init__(**kwargs) self.type = 'MarketoSource' # type: str self.query = kwargs.get('query', None) @@ -22787,10 +31517,10 @@ def __init__( class MetadataItem(msrest.serialization.Model): """Specify the name and value of custom metadata item. - :param name: Metadata item key name. Type: string (or Expression with resultType string). - :type name: any - :param value: Metadata item value. Type: string (or Expression with resultType string). - :type value: any + :ivar name: Metadata item key name. Type: string (or Expression with resultType string). + :vartype name: any + :ivar value: Metadata item value. Type: string (or Expression with resultType string). + :vartype value: any """ _attribute_map = { @@ -22802,6 +31532,12 @@ def __init__( self, **kwargs ): + """ + :keyword name: Metadata item key name. Type: string (or Expression with resultType string). + :paramtype name: any + :keyword value: Metadata item value. Type: string (or Expression with resultType string). + :paramtype value: any + """ super(MetadataItem, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.value = kwargs.get('value', None) @@ -22812,39 +31548,39 @@ class MicrosoftAccessLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The non-access credential portion of the connection string + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param authentication_type: Type of authentication used to connect to the Microsoft Access as + :vartype connection_string: any + :ivar authentication_type: Type of authentication used to connect to the Microsoft Access as ODBC data store. 
Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :type authentication_type: any - :param credential: The access credential portion of the connection string specified in + :vartype authentication_type: any + :ivar credential: The access credential portion of the connection string specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with + :vartype credential: ~azure.mgmt.datafactory.models.SecretBase + :ivar user_name: User name for Basic authentication. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype user_name: any + :ivar password: Password for Basic authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -22871,6 +31607,39 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The non-access credential portion of the connection + string as well as an optional encrypted credential. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword authentication_type: Type of authentication used to connect to the Microsoft Access as + ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with + resultType string). + :paramtype authentication_type: any + :keyword credential: The access credential portion of the connection string specified in + driver-specific property-value format. + :paramtype credential: ~azure.mgmt.datafactory.models.SecretBase + :keyword user_name: User name for Basic authentication. Type: string (or Expression with + resultType string). + :paramtype user_name: any + :keyword password: Password for Basic authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(MicrosoftAccessLinkedService, self).__init__(**kwargs) self.type = 'MicrosoftAccess' # type: str self.connection_string = kwargs['connection_string'] @@ -22886,32 +31655,32 @@ class MicrosoftAccessSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. 
Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any - :param pre_copy_script: A query to execute before starting the copy. Type: string (or - Expression with resultType string). - :type pre_copy_script: any + :vartype disable_metrics_collection: any + :ivar pre_copy_script: A query to execute before starting the copy. Type: string (or Expression + with resultType string). + :vartype pre_copy_script: any """ _validation = { @@ -22934,6 +31703,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). 
+ :paramtype pre_copy_script: any + """ super(MicrosoftAccessSink, self).__init__(**kwargs) self.type = 'MicrosoftAccessSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) @@ -22944,28 +31739,28 @@ class MicrosoftAccessSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar query: Database query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -22987,6 +31782,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :paramtype additional_columns: any + """ super(MicrosoftAccessSource, self).__init__(**kwargs) self.type = 'MicrosoftAccessSource' # type: str self.query = kwargs.get('query', None) @@ -22998,31 +31815,31 @@ class MicrosoftAccessTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The Microsoft Access table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype table_name: any """ _validation = { @@ -23047,6 +31864,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The Microsoft Access table name. Type: string (or Expression with + resultType string). + :paramtype table_name: any + """ super(MicrosoftAccessTableDataset, self).__init__(**kwargs) self.type = 'MicrosoftAccessTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -23057,31 +31899,31 @@ class MongoDbAtlasCollectionDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param collection: Required. The collection name of the MongoDB Atlas database. Type: string - (or Expression with resultType string). - :type collection: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar collection: Required. The collection name of the MongoDB Atlas database. Type: string (or + Expression with resultType string). + :vartype collection: any """ _validation = { @@ -23107,6 +31949,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. 
+ :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword collection: Required. The collection name of the MongoDB Atlas database. Type: string + (or Expression with resultType string). + :paramtype collection: any + """ super(MongoDbAtlasCollectionDataset, self).__init__(**kwargs) self.type = 'MongoDbAtlasCollection' # type: str self.collection = kwargs['collection'] @@ -23117,26 +31984,26 @@ class MongoDbAtlasLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The MongoDB Atlas connection string. Type: string, + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The MongoDB Atlas connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param database: Required. The name of the MongoDB Atlas database that you want to access. - Type: string (or Expression with resultType string). - :type database: any + :vartype connection_string: any + :ivar database: Required. The name of the MongoDB Atlas database that you want to access. Type: + string (or Expression with resultType string). + :vartype database: any """ _validation = { @@ -23160,6 +32027,26 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. 
+ :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The MongoDB Atlas connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword database: Required. The name of the MongoDB Atlas database that you want to access. + Type: string (or Expression with resultType string). + :paramtype database: any + """ super(MongoDbAtlasLinkedService, self).__init__(**kwargs) self.type = 'MongoDbAtlas' # type: str self.connection_string = kwargs['connection_string'] @@ -23171,33 +32058,33 @@ class MongoDbAtlasSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: Specifies whether the document with same key to be overwritten (upsert) + :vartype disable_metrics_collection: any + :ivar write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). - :type write_behavior: any + :vartype write_behavior: any """ _validation = { @@ -23220,6 +32107,33 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: Specifies whether the document with same key to be overwritten + (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or + Expression with resultType string). Type: string (or Expression with resultType string). + :paramtype write_behavior: any + """ super(MongoDbAtlasSink, self).__init__(**kwargs) self.type = 'MongoDbAtlasSink' # type: str self.write_behavior = kwargs.get('write_behavior', None) @@ -23230,40 +32144,40 @@ class MongoDbAtlasSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param filter: Specifies selection filter using query operators. To return all documents in a + :vartype disable_metrics_collection: any + :ivar filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). - :type filter: any - :param cursor_methods: Cursor methods for Mongodb query. 
- :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties - :param batch_size: Specifies the number of documents to return in each batch of the response + :vartype filter: any + :ivar cursor_methods: Cursor methods for Mongodb query. + :vartype cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :ivar batch_size: Specifies the number of documents to return in each batch of the response from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). - :type batch_size: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype batch_size: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -23288,6 +32202,40 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). + :paramtype filter: any + :keyword cursor_methods: Cursor methods for Mongodb query. + :paramtype cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :keyword batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user + or the application. This property's main purpose is to avoid hit the limitation of response + size. Type: integer (or Expression with resultType integer). + :paramtype batch_size: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(MongoDbAtlasSource, self).__init__(**kwargs) self.type = 'MongoDbAtlasSource' # type: str self.filter = kwargs.get('filter', None) @@ -23302,31 +32250,31 @@ class MongoDbCollectionDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param collection_name: Required. The table name of the MongoDB database. Type: string (or + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar collection_name: Required. The table name of the MongoDB database. Type: string (or Expression with resultType string). - :type collection_name: any + :vartype collection_name: any """ _validation = { @@ -23352,6 +32300,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword collection_name: Required. The table name of the MongoDB database. Type: string (or + Expression with resultType string). + :paramtype collection_name: any + """ super(MongoDbCollectionDataset, self).__init__(**kwargs) self.type = 'MongoDbCollection' # type: str self.collection_name = kwargs['collection_name'] @@ -23360,24 +32333,24 @@ def __init__( class MongoDbCursorMethodsProperties(msrest.serialization.Model): """Cursor methods for Mongodb query. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param project: Specifies the fields to return in the documents that match the query filter. To + :vartype additional_properties: dict[str, any] + :ivar project: Specifies the fields to return in the documents that match the query filter. To return all fields in the matching documents, omit this parameter. Type: string (or Expression with resultType string). - :type project: any - :param sort: Specifies the order in which the query returns matching documents. Type: string - (or Expression with resultType string). Type: string (or Expression with resultType string). - :type sort: any - :param skip: Specifies the how many documents skipped and where MongoDB begins returning + :vartype project: any + :ivar sort: Specifies the order in which the query returns matching documents. Type: string (or + Expression with resultType string). Type: string (or Expression with resultType string). + :vartype sort: any + :ivar skip: Specifies the how many documents skipped and where MongoDB begins returning results. This approach may be useful in implementing paginated results. Type: integer (or Expression with resultType integer). 
- :type skip: any - :param limit: Specifies the maximum number of documents the server returns. limit() is - analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with - resultType integer). - :type limit: any + :vartype skip: any + :ivar limit: Specifies the maximum number of documents the server returns. limit() is analogous + to the LIMIT statement in a SQL database. Type: integer (or Expression with resultType + integer). + :vartype limit: any """ _attribute_map = { @@ -23392,6 +32365,26 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword project: Specifies the fields to return in the documents that match the query filter. + To return all fields in the matching documents, omit this parameter. Type: string (or + Expression with resultType string). + :paramtype project: any + :keyword sort: Specifies the order in which the query returns matching documents. Type: string + (or Expression with resultType string). Type: string (or Expression with resultType string). + :paramtype sort: any + :keyword skip: Specifies the how many documents skipped and where MongoDB begins returning + results. This approach may be useful in implementing paginated results. Type: integer (or + Expression with resultType integer). + :paramtype skip: any + :keyword limit: Specifies the maximum number of documents the server returns. limit() is + analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with + resultType integer). + :paramtype limit: any + """ super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.project = kwargs.get('project', None) @@ -23405,49 +32398,49 @@ class MongoDbLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param server: Required. The IP address or server name of the MongoDB server. Type: string (or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar server: Required. The IP address or server name of the MongoDB server. Type: string (or Expression with resultType string). - :type server: any - :param authentication_type: The authentication type to be used to connect to the MongoDB + :vartype server: any + :ivar authentication_type: The authentication type to be used to connect to the MongoDB database. Possible values include: "Basic", "Anonymous". 
- :type authentication_type: str or ~azure.mgmt.datafactory.models.MongoDbAuthenticationType - :param database_name: Required. The name of the MongoDB database that you want to access. Type: + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.MongoDbAuthenticationType + :ivar database_name: Required. The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). - :type database_name: any - :param username: Username for authentication. Type: string (or Expression with resultType + :vartype database_name: any + :ivar username: Username for authentication. Type: string (or Expression with resultType string). - :type username: any - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param auth_source: Database to verify the username and password. Type: string (or Expression + :vartype username: any + :ivar password: Password for authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar auth_source: Database to verify the username and password. Type: string (or Expression with resultType string). - :type auth_source: any - :param port: The TCP port number that the MongoDB server uses to listen for client connections. + :vartype auth_source: any + :ivar port: The TCP port number that the MongoDB server uses to listen for client connections. The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: any - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + :vartype port: any + :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. Type: boolean (or Expression with resultType boolean). 
- :type enable_ssl: any - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + :vartype enable_ssl: any + :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. Type: boolean (or Expression with resultType boolean). - :type allow_self_signed_server_cert: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype allow_self_signed_server_cert: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -23479,6 +32472,51 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword server: Required. The IP address or server name of the MongoDB server. Type: string + (or Expression with resultType string). + :paramtype server: any + :keyword authentication_type: The authentication type to be used to connect to the MongoDB + database. Possible values include: "Basic", "Anonymous". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.MongoDbAuthenticationType + :keyword database_name: Required. 
The name of the MongoDB database that you want to access. + Type: string (or Expression with resultType string). + :paramtype database_name: any + :keyword username: Username for authentication. Type: string (or Expression with resultType + string). + :paramtype username: any + :keyword password: Password for authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword auth_source: Database to verify the username and password. Type: string (or Expression + with resultType string). + :paramtype auth_source: any + :keyword port: The TCP port number that the MongoDB server uses to listen for client + connections. The default value is 27017. Type: integer (or Expression with resultType integer), + minimum: 0. + :paramtype port: any + :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. + The default value is false. Type: boolean (or Expression with resultType boolean). + :paramtype enable_ssl: any + :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates + from the server. The default value is false. Type: boolean (or Expression with resultType + boolean). + :paramtype allow_self_signed_server_cert: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(MongoDbLinkedService, self).__init__(**kwargs) self.type = 'MongoDb' # type: str self.server = kwargs['server'] @@ -23498,29 +32536,29 @@ class MongoDbSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression + :vartype disable_metrics_collection: any + :ivar query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). - :type query: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query: any + :ivar additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -23542,6 +32580,29 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: Database query. Should be a SQL-92 query expression. Type: string (or + Expression with resultType string). + :paramtype query: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(MongoDbSource, self).__init__(**kwargs) self.type = 'MongoDbSource' # type: str self.query = kwargs.get('query', None) @@ -23553,31 +32614,31 @@ class MongoDbV2CollectionDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param collection: Required. The collection name of the MongoDB database. Type: string (or + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar collection: Required. The collection name of the MongoDB database. Type: string (or Expression with resultType string). - :type collection: any + :vartype collection: any """ _validation = { @@ -23603,6 +32664,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword collection: Required. The collection name of the MongoDB database. Type: string (or + Expression with resultType string). + :paramtype collection: any + """ super(MongoDbV2CollectionDataset, self).__init__(**kwargs) self.type = 'MongoDbV2Collection' # type: str self.collection = kwargs['collection'] @@ -23613,25 +32699,25 @@ class MongoDbV2LinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The MongoDB connection string. Type: string, SecureString - or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param database: Required. The name of the MongoDB database that you want to access. Type: + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. 
+ :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The MongoDB connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. + :vartype connection_string: any + :ivar database: Required. The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). - :type database: any + :vartype database: any """ _validation = { @@ -23655,6 +32741,25 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The MongoDB connection string. Type: string, SecureString + or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword database: Required. The name of the MongoDB database that you want to access. Type: + string (or Expression with resultType string). 
+ :paramtype database: any + """ super(MongoDbV2LinkedService, self).__init__(**kwargs) self.type = 'MongoDbV2' # type: str self.connection_string = kwargs['connection_string'] @@ -23666,33 +32771,33 @@ class MongoDbV2Sink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: Specifies whether the document with same key to be overwritten (upsert) + :vartype disable_metrics_collection: any + :ivar write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). - :type write_behavior: any + :vartype write_behavior: any """ _validation = { @@ -23715,6 +32820,33 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: Specifies whether the document with same key to be overwritten + (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or + Expression with resultType string). Type: string (or Expression with resultType string). + :paramtype write_behavior: any + """ super(MongoDbV2Sink, self).__init__(**kwargs) self.type = 'MongoDbV2Sink' # type: str self.write_behavior = kwargs.get('write_behavior', None) @@ -23725,40 +32857,40 @@ class MongoDbV2Source(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param filter: Specifies selection filter using query operators. To return all documents in a + :vartype disable_metrics_collection: any + :ivar filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). - :type filter: any - :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties - :param batch_size: Specifies the number of documents to return in each batch of the response + :vartype filter: any + :ivar cursor_methods: Cursor methods for Mongodb query. + :vartype cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :ivar batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. 
Type: integer (or Expression with resultType integer). - :type batch_size: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype batch_size: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -23783,6 +32915,40 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). 
+ :paramtype filter: any + :keyword cursor_methods: Cursor methods for Mongodb query. + :paramtype cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :keyword batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB instance. In most cases, modifying the batch size will not affect the user or the + application. This property's main purpose is to avoid hit the limitation of response size. + Type: integer (or Expression with resultType integer). + :paramtype batch_size: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(MongoDbV2Source, self).__init__(**kwargs) self.type = 'MongoDbV2Source' # type: str self.filter = kwargs.get('filter', None) @@ -23797,27 +32963,27 @@ class MySqlLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. 
The connection string. - :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -23841,6 +33007,27 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. + :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(MySqlLinkedService, self).__init__(**kwargs) self.type = 'MySql' # type: str self.connection_string = kwargs['connection_string'] @@ -23853,31 +33040,31 @@ class MySqlSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any + :vartype additional_columns: any + :ivar query: Database query. Type: string (or Expression with resultType string). 
+ :vartype query: any """ _validation = { @@ -23900,6 +33087,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: any + """ super(MySqlSource, self).__init__(**kwargs) self.type = 'MySqlSource' # type: str self.query = kwargs.get('query', None) @@ -23910,30 +33122,30 @@ class MySqlTableDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The MySQL table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The MySQL table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -23958,6 +33170,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The MySQL table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(MySqlTableDataset, self).__init__(**kwargs) self.type = 'MySqlTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -23968,28 +33204,28 @@ class NetezzaLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: An ODBC connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. 
+ :vartype annotations: list[any] + :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar pwd: The Azure key vault secret reference of password in connection string. + :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -24012,6 +33248,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword pwd: The Azure key vault secret reference of password in connection string. 
+ :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(NetezzaLinkedService, self).__init__(**kwargs) self.type = 'Netezza' # type: str self.connection_string = kwargs.get('connection_string', None) @@ -24022,17 +33280,17 @@ def __init__( class NetezzaPartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for Netezza source partitioning. - :param partition_column_name: The name of the column in integer type that will be used for + :ivar partition_column_name: The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: any - :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + :vartype partition_column_name: any + :ivar partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_upper_bound: any - :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + :vartype partition_upper_bound: any + :ivar partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_lower_bound: any + :vartype partition_lower_bound: any """ _attribute_map = { @@ -24045,6 +33303,19 @@ def __init__( self, **kwargs ): + """ + :keyword partition_column_name: The name of the column in integer type that will be used for + proceeding range partitioning. 
Type: string (or Expression with resultType string). + :paramtype partition_column_name: any + :keyword partition_upper_bound: The maximum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). + :paramtype partition_upper_bound: any + :keyword partition_lower_bound: The minimum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). + :paramtype partition_lower_bound: any + """ super(NetezzaPartitionSettings, self).__init__(**kwargs) self.partition_column_name = kwargs.get('partition_column_name', None) self.partition_upper_bound = kwargs.get('partition_upper_bound', None) @@ -24056,37 +33327,37 @@ class NetezzaSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any - :param partition_option: The partition mechanism that will be used for Netezza read in - parallel. Possible values include: "None", "DataSlice", "DynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for Netezza source partitioning. 
- :type partition_settings: ~azure.mgmt.datafactory.models.NetezzaPartitionSettings + :vartype query: any + :ivar partition_option: The partition mechanism that will be used for Netezza read in parallel. + Possible values include: "None", "DataSlice", "DynamicRange". + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for Netezza source partitioning. + :vartype partition_settings: ~azure.mgmt.datafactory.models.NetezzaPartitionSettings """ _validation = { @@ -24111,6 +33382,38 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. 
Type: string (or Expression with + resultType string). + :paramtype query: any + :keyword partition_option: The partition mechanism that will be used for Netezza read in + parallel. Possible values include: "None", "DataSlice", "DynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for Netezza source + partitioning. + :paramtype partition_settings: ~azure.mgmt.datafactory.models.NetezzaPartitionSettings + """ super(NetezzaSource, self).__init__(**kwargs) self.type = 'NetezzaSource' # type: str self.query = kwargs.get('query', None) @@ -24123,37 +33426,37 @@ class NetezzaTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The table name of the Netezza. Type: string (or Expression with resultType + :vartype table_name: any + :ivar table: The table name of the Netezza. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The schema name of the Netezza. Type: string (or + :vartype table: any + :ivar schema_type_properties_schema: The schema name of the Netezza. Type: string (or Expression with resultType string). 
- :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -24180,6 +33483,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of the Netezza. Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of the Netezza. Type: string (or + Expression with resultType string). 
+ :paramtype schema_type_properties_schema: any + """ super(NetezzaTableDataset, self).__init__(**kwargs) self.type = 'NetezzaTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -24192,66 +33526,66 @@ class ODataLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param url: Required. The URL of the OData service endpoint. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar url: Required. The URL of the OData service endpoint. Type: string (or Expression with resultType string). 
- :type url: any - :param authentication_type: Type of authentication used to connect to the OData service. + :vartype url: any + :ivar authentication_type: Type of authentication used to connect to the OData service. Possible values include: "Basic", "Anonymous", "Windows", "AadServicePrincipal", "ManagedServiceIdentity". - :type authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType - :param user_name: User name of the OData service. Type: string (or Expression with resultType + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType + :ivar user_name: User name of the OData service. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password of the OData service. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param auth_headers: The additional HTTP headers in the request to RESTful API used for + :vartype user_name: any + :ivar password: Password of the OData service. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). - :type auth_headers: any - :param tenant: Specify the tenant information (domain name or tenant ID) under which your + :vartype auth_headers: any + :ivar tenant: Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). - :type tenant: any - :param service_principal_id: Specify the application id of your application registered in Azure + :vartype tenant: any + :ivar service_principal_id: Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. 
Allowed + :vartype service_principal_id: any + :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: any - :param aad_resource_id: Specify the resource you are requesting authorization to use Directory. + :vartype azure_cloud_type: any + :ivar aad_resource_id: Specify the resource you are requesting authorization to use Directory. Type: string (or Expression with resultType string). - :type aad_resource_id: any - :param aad_service_principal_credential_type: Specify the credential type (key or cert) is used + :vartype aad_resource_id: any + :ivar aad_service_principal_credential_type: Specify the credential type (key or cert) is used for service principal. Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". - :type aad_service_principal_credential_type: str or + :vartype aad_service_principal_credential_type: str or ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType - :param service_principal_key: Specify the secret of your application registered in Azure Active + :ivar service_principal_key: Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_embedded_cert: Specify the base64 encoded certificate of your + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_principal_embedded_cert: Specify the base64 encoded certificate of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). 
- :type service_principal_embedded_cert: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_embedded_cert_password: Specify the password of your certificate if + :vartype service_principal_embedded_cert: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_principal_embedded_cert_password: Specify the password of your certificate if your certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with resultType string). - :type service_principal_embedded_cert_password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_principal_embedded_cert_password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -24286,6 +33620,67 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword url: Required. The URL of the OData service endpoint. Type: string (or Expression with + resultType string). 
+ :paramtype url: any + :keyword authentication_type: Type of authentication used to connect to the OData service. + Possible values include: "Basic", "Anonymous", "Windows", "AadServicePrincipal", + "ManagedServiceIdentity". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType + :keyword user_name: User name of the OData service. Type: string (or Expression with resultType + string). + :paramtype user_name: any + :keyword password: Password of the OData service. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword auth_headers: The additional HTTP headers in the request to RESTful API used for + authorization. Type: object (or Expression with resultType object). + :paramtype auth_headers: any + :keyword tenant: Specify the tenant information (domain name or tenant ID) under which your + application resides. Type: string (or Expression with resultType string). + :paramtype tenant: any + :keyword service_principal_id: Specify the application id of your application registered in + Azure Active Directory. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. + Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is + the data factory regions’ cloud type. Type: string (or Expression with resultType string). + :paramtype azure_cloud_type: any + :keyword aad_resource_id: Specify the resource you are requesting authorization to use + Directory. Type: string (or Expression with resultType string). + :paramtype aad_resource_id: any + :keyword aad_service_principal_credential_type: Specify the credential type (key or cert) is + used for service principal. Possible values include: "ServicePrincipalKey", + "ServicePrincipalCert". 
+ :paramtype aad_service_principal_credential_type: str or + ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType + :keyword service_principal_key: Specify the secret of your application registered in Azure + Active Directory. Type: string (or Expression with resultType string). + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_principal_embedded_cert: Specify the base64 encoded certificate of your + application registered in Azure Active Directory. Type: string (or Expression with resultType + string). + :paramtype service_principal_embedded_cert: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_principal_embedded_cert_password: Specify the password of your certificate if + your certificate has a password and you are using AadServicePrincipal authentication. Type: + string (or Expression with resultType string). + :paramtype service_principal_embedded_cert_password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(ODataLinkedService, self).__init__(**kwargs) self.type = 'OData' # type: str self.url = kwargs['url'] @@ -24309,30 +33704,30 @@ class ODataResourceDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param path: The OData resource path. Type: string (or Expression with resultType string). 
- :type path: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar path: The OData resource path. Type: string (or Expression with resultType string). + :vartype path: any """ _validation = { @@ -24357,6 +33752,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword path: The OData resource path. Type: string (or Expression with resultType string). + :paramtype path: any + """ super(ODataResourceDataset, self).__init__(**kwargs) self.type = 'ODataResource' # type: str self.path = kwargs.get('path', None) @@ -24367,34 +33786,34 @@ class ODataSource(CopySource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType + :vartype disable_metrics_collection: any + :ivar query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
- :type query: any - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + :vartype query: any + :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype http_request_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -24417,6 +33836,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: OData query. For example, "$top=1". Type: string (or Expression with resultType + string). 
+ :paramtype query: any + :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the + timeout to get a response, not the timeout to read response data. Default value: 00:05:00. + Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype http_request_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(ODataSource, self).__init__(**kwargs) self.type = 'ODataSource' # type: str self.query = kwargs.get('query', None) @@ -24429,38 +33876,38 @@ class OdbcLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The non-access credential portion of the connection string + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. 
+ :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param authentication_type: Type of authentication used to connect to the ODBC data store. + :vartype connection_string: any + :ivar authentication_type: Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :type authentication_type: any - :param credential: The access credential portion of the connection string specified in + :vartype authentication_type: any + :ivar credential: The access credential portion of the connection string specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with + :vartype credential: ~azure.mgmt.datafactory.models.SecretBase + :ivar user_name: User name for Basic authentication. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype user_name: any + :ivar password: Password for Basic authentication. 
+ :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -24487,6 +33934,38 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The non-access credential portion of the connection + string as well as an optional encrypted credential. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword authentication_type: Type of authentication used to connect to the ODBC data store. + Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). + :paramtype authentication_type: any + :keyword credential: The access credential portion of the connection string specified in + driver-specific property-value format. + :paramtype credential: ~azure.mgmt.datafactory.models.SecretBase + :keyword user_name: User name for Basic authentication. Type: string (or Expression with + resultType string). + :paramtype user_name: any + :keyword password: Password for Basic authentication. 
+ :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(OdbcLinkedService, self).__init__(**kwargs) self.type = 'Odbc' # type: str self.connection_string = kwargs['connection_string'] @@ -24502,32 +33981,32 @@ class OdbcSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param pre_copy_script: A query to execute before starting the copy. Type: string (or - Expression with resultType string). - :type pre_copy_script: any + :vartype disable_metrics_collection: any + :ivar pre_copy_script: A query to execute before starting the copy. Type: string (or Expression + with resultType string). + :vartype pre_copy_script: any """ _validation = { @@ -24550,6 +34029,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). + :paramtype pre_copy_script: any + """ super(OdbcSink, self).__init__(**kwargs) self.type = 'OdbcSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) @@ -24560,31 +34065,31 @@ class OdbcSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any + :vartype additional_columns: any + :ivar query: Database query. Type: string (or Expression with resultType string). + :vartype query: any """ _validation = { @@ -24607,6 +34112,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: any + """ super(OdbcSource, self).__init__(**kwargs) self.type = 'OdbcSource' # type: str self.query = kwargs.get('query', None) @@ -24617,30 +34147,30 @@ class OdbcTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The ODBC table name. Type: string (or Expression with resultType string). 
- :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The ODBC table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -24665,6 +34195,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The ODBC table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(OdbcTableDataset, self).__init__(**kwargs) self.type = 'OdbcTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -24675,34 +34229,34 @@ class Office365Dataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: Required. Name of the dataset to extract from Office 365. Type: string (or + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: Required. Name of the dataset to extract from Office 365. Type: string (or Expression with resultType string). - :type table_name: any - :param predicate: A predicate expression that can be used to filter the specific rows to - extract from Office 365. Type: string (or Expression with resultType string). - :type predicate: any + :vartype table_name: any + :ivar predicate: A predicate expression that can be used to filter the specific rows to extract + from Office 365. Type: string (or Expression with resultType string). + :vartype predicate: any """ _validation = { @@ -24729,6 +34283,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: Required. Name of the dataset to extract from Office 365. Type: string (or + Expression with resultType string). + :paramtype table_name: any + :keyword predicate: A predicate expression that can be used to filter the specific rows to + extract from Office 365. Type: string (or Expression with resultType string). + :paramtype predicate: any + """ super(Office365Dataset, self).__init__(**kwargs) self.type = 'Office365Table' # type: str self.table_name = kwargs['table_name'] @@ -24740,34 +34322,34 @@ class Office365LinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. 
- :type annotations: list[any] - :param office365_tenant_id: Required. Azure tenant ID to which the Office 365 account belongs. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar office365_tenant_id: Required. Azure tenant ID to which the Office 365 account belongs. Type: string (or Expression with resultType string). - :type office365_tenant_id: any - :param service_principal_tenant_id: Required. Specify the tenant information under which your + :vartype office365_tenant_id: any + :ivar service_principal_tenant_id: Required. Specify the tenant information under which your Azure AD web application resides. Type: string (or Expression with resultType string). - :type service_principal_tenant_id: any - :param service_principal_id: Required. Specify the application's client ID. Type: string (or + :vartype service_principal_tenant_id: any + :ivar service_principal_id: Required. Specify the application's client ID. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: Required. Specify the application's key. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_principal_id: any + :ivar service_principal_key: Required. Specify the application's key. 
+ :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -24796,6 +34378,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword office365_tenant_id: Required. Azure tenant ID to which the Office 365 account + belongs. Type: string (or Expression with resultType string). + :paramtype office365_tenant_id: any + :keyword service_principal_tenant_id: Required. Specify the tenant information under which your + Azure AD web application resides. Type: string (or Expression with resultType string). + :paramtype service_principal_tenant_id: any + :keyword service_principal_id: Required. Specify the application's client ID. Type: string (or + Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: Required. Specify the application's key. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. 
Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(Office365LinkedService, self).__init__(**kwargs) self.type = 'Office365' # type: str self.office365_tenant_id = kwargs['office365_tenant_id'] @@ -24810,42 +34420,42 @@ class Office365Source(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param allowed_groups: The groups containing all the users. Type: array of strings (or + :vartype disable_metrics_collection: any + :ivar allowed_groups: The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). - :type allowed_groups: any - :param user_scope_filter_uri: The user scope uri. Type: string (or Expression with resultType + :vartype allowed_groups: any + :ivar user_scope_filter_uri: The user scope uri. Type: string (or Expression with resultType string). - :type user_scope_filter_uri: any - :param date_filter_column: The Column to apply the :code:`` and + :vartype user_scope_filter_uri: any + :ivar date_filter_column: The Column to apply the :code:`` and :code:``. Type: string (or Expression with resultType string). - :type date_filter_column: any - :param start_time: Start time of the requested range for this dataset. Type: string (or + :vartype date_filter_column: any + :ivar start_time: Start time of the requested range for this dataset. Type: string (or Expression with resultType string). - :type start_time: any - :param end_time: End time of the requested range for this dataset. Type: string (or Expression + :vartype start_time: any + :ivar end_time: End time of the requested range for this dataset. Type: string (or Expression with resultType string). - :type end_time: any - :param output_columns: The columns to be read out from the Office 365 table. Type: array of + :vartype end_time: any + :ivar output_columns: The columns to be read out from the Office 365 table. Type: array of objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, { "name": "CreatedDateTime" } ]. 
- :type output_columns: any + :vartype output_columns: any """ _validation = { @@ -24871,6 +34481,42 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword allowed_groups: The groups containing all the users. Type: array of strings (or + Expression with resultType array of strings). + :paramtype allowed_groups: any + :keyword user_scope_filter_uri: The user scope uri. Type: string (or Expression with resultType + string). + :paramtype user_scope_filter_uri: any + :keyword date_filter_column: The Column to apply the :code:`` and + :code:``. Type: string (or Expression with resultType string). + :paramtype date_filter_column: any + :keyword start_time: Start time of the requested range for this dataset. Type: string (or + Expression with resultType string). + :paramtype start_time: any + :keyword end_time: End time of the requested range for this dataset. Type: string (or + Expression with resultType string). + :paramtype end_time: any + :keyword output_columns: The columns to be read out from the Office 365 table. 
Type: array of + objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, { + "name": "CreatedDateTime" } ]. + :paramtype output_columns: any + """ super(Office365Source, self).__init__(**kwargs) self.type = 'Office365Source' # type: str self.allowed_groups = kwargs.get('allowed_groups', None) @@ -24884,14 +34530,14 @@ def __init__( class Operation(msrest.serialization.Model): """Azure Data Factory API operation definition. - :param name: Operation name: {provider}/{resource}/{operation}. - :type name: str - :param origin: The intended executor of the operation. - :type origin: str - :param display: Metadata associated with the operation. - :type display: ~azure.mgmt.datafactory.models.OperationDisplay - :param service_specification: Details about a service operation. - :type service_specification: ~azure.mgmt.datafactory.models.OperationServiceSpecification + :ivar name: Operation name: {provider}/{resource}/{operation}. + :vartype name: str + :ivar origin: The intended executor of the operation. + :vartype origin: str + :ivar display: Metadata associated with the operation. + :vartype display: ~azure.mgmt.datafactory.models.OperationDisplay + :ivar service_specification: Details about a service operation. + :vartype service_specification: ~azure.mgmt.datafactory.models.OperationServiceSpecification """ _attribute_map = { @@ -24905,6 +34551,16 @@ def __init__( self, **kwargs ): + """ + :keyword name: Operation name: {provider}/{resource}/{operation}. + :paramtype name: str + :keyword origin: The intended executor of the operation. + :paramtype origin: str + :keyword display: Metadata associated with the operation. + :paramtype display: ~azure.mgmt.datafactory.models.OperationDisplay + :keyword service_specification: Details about a service operation. 
+ :paramtype service_specification: ~azure.mgmt.datafactory.models.OperationServiceSpecification + """ super(Operation, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.origin = kwargs.get('origin', None) @@ -24915,14 +34571,14 @@ def __init__( class OperationDisplay(msrest.serialization.Model): """Metadata associated with the operation. - :param description: The description of the operation. - :type description: str - :param provider: The name of the provider. - :type provider: str - :param resource: The name of the resource type on which the operation is performed. - :type resource: str - :param operation: The type of operation: get, read, delete, etc. - :type operation: str + :ivar description: The description of the operation. + :vartype description: str + :ivar provider: The name of the provider. + :vartype provider: str + :ivar resource: The name of the resource type on which the operation is performed. + :vartype resource: str + :ivar operation: The type of operation: get, read, delete, etc. + :vartype operation: str """ _attribute_map = { @@ -24936,6 +34592,16 @@ def __init__( self, **kwargs ): + """ + :keyword description: The description of the operation. + :paramtype description: str + :keyword provider: The name of the provider. + :paramtype provider: str + :keyword resource: The name of the resource type on which the operation is performed. + :paramtype resource: str + :keyword operation: The type of operation: get, read, delete, etc. + :paramtype operation: str + """ super(OperationDisplay, self).__init__(**kwargs) self.description = kwargs.get('description', None) self.provider = kwargs.get('provider', None) @@ -24946,10 +34612,10 @@ def __init__( class OperationListResponse(msrest.serialization.Model): """A list of operations that can be performed by the Data Factory service. - :param value: List of Data Factory operations supported by the Data Factory resource provider. 
- :type value: list[~azure.mgmt.datafactory.models.Operation] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: List of Data Factory operations supported by the Data Factory resource provider. + :vartype value: list[~azure.mgmt.datafactory.models.Operation] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _attribute_map = { @@ -24961,6 +34627,13 @@ def __init__( self, **kwargs ): + """ + :keyword value: List of Data Factory operations supported by the Data Factory resource + provider. + :paramtype value: list[~azure.mgmt.datafactory.models.Operation] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(OperationListResponse, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = kwargs.get('next_link', None) @@ -24969,12 +34642,12 @@ def __init__( class OperationLogSpecification(msrest.serialization.Model): """Details about an operation related to logs. - :param name: The name of the log category. - :type name: str - :param display_name: Localized display name. - :type display_name: str - :param blob_duration: Blobs created in the customer storage account, per hour. - :type blob_duration: str + :ivar name: The name of the log category. + :vartype name: str + :ivar display_name: Localized display name. + :vartype display_name: str + :ivar blob_duration: Blobs created in the customer storage account, per hour. + :vartype blob_duration: str """ _attribute_map = { @@ -24987,6 +34660,14 @@ def __init__( self, **kwargs ): + """ + :keyword name: The name of the log category. + :paramtype name: str + :keyword display_name: Localized display name. + :paramtype display_name: str + :keyword blob_duration: Blobs created in the customer storage account, per hour. 
+ :paramtype blob_duration: str + """ super(OperationLogSpecification, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.display_name = kwargs.get('display_name', None) @@ -24996,10 +34677,10 @@ def __init__( class OperationMetricAvailability(msrest.serialization.Model): """Defines how often data for a metric becomes available. - :param time_grain: The granularity for the metric. - :type time_grain: str - :param blob_duration: Blob created in the customer storage account, per hour. - :type blob_duration: str + :ivar time_grain: The granularity for the metric. + :vartype time_grain: str + :ivar blob_duration: Blob created in the customer storage account, per hour. + :vartype blob_duration: str """ _attribute_map = { @@ -25011,6 +34692,12 @@ def __init__( self, **kwargs ): + """ + :keyword time_grain: The granularity for the metric. + :paramtype time_grain: str + :keyword blob_duration: Blob created in the customer storage account, per hour. + :paramtype blob_duration: str + """ super(OperationMetricAvailability, self).__init__(**kwargs) self.time_grain = kwargs.get('time_grain', None) self.blob_duration = kwargs.get('blob_duration', None) @@ -25019,12 +34706,12 @@ def __init__( class OperationMetricDimension(msrest.serialization.Model): """Defines the metric dimension. - :param name: The name of the dimension for the metric. - :type name: str - :param display_name: The display name of the metric dimension. - :type display_name: str - :param to_be_exported_for_shoebox: Whether the dimension should be exported to Azure Monitor. - :type to_be_exported_for_shoebox: bool + :ivar name: The name of the dimension for the metric. + :vartype name: str + :ivar display_name: The display name of the metric dimension. + :vartype display_name: str + :ivar to_be_exported_for_shoebox: Whether the dimension should be exported to Azure Monitor. 
+ :vartype to_be_exported_for_shoebox: bool """ _attribute_map = { @@ -25037,6 +34724,14 @@ def __init__( self, **kwargs ): + """ + :keyword name: The name of the dimension for the metric. + :paramtype name: str + :keyword display_name: The display name of the metric dimension. + :paramtype display_name: str + :keyword to_be_exported_for_shoebox: Whether the dimension should be exported to Azure Monitor. + :paramtype to_be_exported_for_shoebox: bool + """ super(OperationMetricDimension, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.display_name = kwargs.get('display_name', None) @@ -25046,26 +34741,26 @@ def __init__( class OperationMetricSpecification(msrest.serialization.Model): """Details about an operation related to metrics. - :param name: The name of the metric. - :type name: str - :param display_name: Localized display name of the metric. - :type display_name: str - :param display_description: The description of the metric. - :type display_description: str - :param unit: The unit that the metric is measured in. - :type unit: str - :param aggregation_type: The type of metric aggregation. - :type aggregation_type: str - :param enable_regional_mdm_account: Whether or not the service is using regional MDM accounts. - :type enable_regional_mdm_account: str - :param source_mdm_account: The name of the MDM account. - :type source_mdm_account: str - :param source_mdm_namespace: The name of the MDM namespace. - :type source_mdm_namespace: str - :param availabilities: Defines how often data for metrics becomes available. - :type availabilities: list[~azure.mgmt.datafactory.models.OperationMetricAvailability] - :param dimensions: Defines the metric dimension. - :type dimensions: list[~azure.mgmt.datafactory.models.OperationMetricDimension] + :ivar name: The name of the metric. + :vartype name: str + :ivar display_name: Localized display name of the metric. + :vartype display_name: str + :ivar display_description: The description of the metric. 
+ :vartype display_description: str + :ivar unit: The unit that the metric is measured in. + :vartype unit: str + :ivar aggregation_type: The type of metric aggregation. + :vartype aggregation_type: str + :ivar enable_regional_mdm_account: Whether or not the service is using regional MDM accounts. + :vartype enable_regional_mdm_account: str + :ivar source_mdm_account: The name of the MDM account. + :vartype source_mdm_account: str + :ivar source_mdm_namespace: The name of the MDM namespace. + :vartype source_mdm_namespace: str + :ivar availabilities: Defines how often data for metrics becomes available. + :vartype availabilities: list[~azure.mgmt.datafactory.models.OperationMetricAvailability] + :ivar dimensions: Defines the metric dimension. + :vartype dimensions: list[~azure.mgmt.datafactory.models.OperationMetricDimension] """ _attribute_map = { @@ -25085,6 +34780,29 @@ def __init__( self, **kwargs ): + """ + :keyword name: The name of the metric. + :paramtype name: str + :keyword display_name: Localized display name of the metric. + :paramtype display_name: str + :keyword display_description: The description of the metric. + :paramtype display_description: str + :keyword unit: The unit that the metric is measured in. + :paramtype unit: str + :keyword aggregation_type: The type of metric aggregation. + :paramtype aggregation_type: str + :keyword enable_regional_mdm_account: Whether or not the service is using regional MDM + accounts. + :paramtype enable_regional_mdm_account: str + :keyword source_mdm_account: The name of the MDM account. + :paramtype source_mdm_account: str + :keyword source_mdm_namespace: The name of the MDM namespace. + :paramtype source_mdm_namespace: str + :keyword availabilities: Defines how often data for metrics becomes available. + :paramtype availabilities: list[~azure.mgmt.datafactory.models.OperationMetricAvailability] + :keyword dimensions: Defines the metric dimension. 
+ :paramtype dimensions: list[~azure.mgmt.datafactory.models.OperationMetricDimension] + """ super(OperationMetricSpecification, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.display_name = kwargs.get('display_name', None) @@ -25101,10 +34819,11 @@ def __init__( class OperationServiceSpecification(msrest.serialization.Model): """Details about a service operation. - :param log_specifications: Details about operations related to logs. - :type log_specifications: list[~azure.mgmt.datafactory.models.OperationLogSpecification] - :param metric_specifications: Details about operations related to metrics. - :type metric_specifications: list[~azure.mgmt.datafactory.models.OperationMetricSpecification] + :ivar log_specifications: Details about operations related to logs. + :vartype log_specifications: list[~azure.mgmt.datafactory.models.OperationLogSpecification] + :ivar metric_specifications: Details about operations related to metrics. + :vartype metric_specifications: + list[~azure.mgmt.datafactory.models.OperationMetricSpecification] """ _attribute_map = { @@ -25116,6 +34835,13 @@ def __init__( self, **kwargs ): + """ + :keyword log_specifications: Details about operations related to logs. + :paramtype log_specifications: list[~azure.mgmt.datafactory.models.OperationLogSpecification] + :keyword metric_specifications: Details about operations related to metrics. + :paramtype metric_specifications: + list[~azure.mgmt.datafactory.models.OperationMetricSpecification] + """ super(OperationServiceSpecification, self).__init__(**kwargs) self.log_specifications = kwargs.get('log_specifications', None) self.metric_specifications = kwargs.get('metric_specifications', None) @@ -25126,34 +34852,34 @@ class OracleCloudStorageLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param access_key_id: The access key identifier of the Oracle Cloud Storage Identity and Access + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar access_key_id: The access key identifier of the Oracle Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: any - :param secret_access_key: The secret access key of the Oracle Cloud Storage Identity and Access + :vartype access_key_id: any + :ivar secret_access_key: The secret access key of the Oracle Cloud Storage Identity and Access Management (IAM) user. 
- :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_url: This value specifies the endpoint to access with the Oracle Cloud Storage + :vartype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_url: This value specifies the endpoint to access with the Oracle Cloud Storage Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :type service_url: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_url: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -25177,6 +34903,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword access_key_id: The access key identifier of the Oracle Cloud Storage Identity and + Access Management (IAM) user. Type: string (or Expression with resultType string). 
+ :paramtype access_key_id: any + :keyword secret_access_key: The secret access key of the Oracle Cloud Storage Identity and + Access Management (IAM) user. + :paramtype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_url: This value specifies the endpoint to access with the Oracle Cloud Storage + Connector. This is an optional property; change it only if you want to try a different service + endpoint or want to switch between https and http. Type: string (or Expression with resultType + string). + :paramtype service_url: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(OracleCloudStorageLinkedService, self).__init__(**kwargs) self.type = 'OracleCloudStorage' # type: str self.access_key_id = kwargs.get('access_key_id', None) @@ -25190,23 +34944,23 @@ class OracleCloudStorageLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. 
Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any - :param bucket_name: Specify the bucketName of Oracle Cloud Storage. Type: string (or Expression + :vartype file_name: any + :ivar bucket_name: Specify the bucketName of Oracle Cloud Storage. Type: string (or Expression with resultType string). - :type bucket_name: any - :param version: Specify the version of Oracle Cloud Storage. Type: string (or Expression with + :vartype bucket_name: any + :ivar version: Specify the version of Oracle Cloud Storage. Type: string (or Expression with resultType string). - :type version: any + :vartype version: any """ _validation = { @@ -25226,6 +34980,23 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + :keyword bucket_name: Specify the bucketName of Oracle Cloud Storage. Type: string (or + Expression with resultType string). + :paramtype bucket_name: any + :keyword version: Specify the version of Oracle Cloud Storage. Type: string (or Expression with + resultType string). + :paramtype version: any + """ super(OracleCloudStorageLocation, self).__init__(**kwargs) self.type = 'OracleCloudStorageLocation' # type: str self.bucket_name = kwargs.get('bucket_name', None) @@ -25237,47 +35008,47 @@ class OracleCloudStorageReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: Oracle Cloud Storage wildcardFolderPath. Type: string (or + :vartype recursive: any + :ivar wildcard_folder_path: Oracle Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: Oracle Cloud Storage wildcardFileName. Type: string (or Expression + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: Oracle Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). 
- :type wildcard_file_name: any - :param prefix: The prefix filter for the Oracle Cloud Storage object name. Type: string (or + :vartype wildcard_file_name: any + :ivar prefix: The prefix filter for the Oracle Cloud Storage object name. Type: string (or Expression with resultType string). - :type prefix: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype prefix: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype file_list_path: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. 
Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any + :vartype modified_datetime_end: any """ _validation = { @@ -25305,6 +35076,47 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: Oracle Cloud Storage wildcardFolderPath. Type: string (or + Expression with resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: Oracle Cloud Storage wildcardFileName. Type: string (or Expression + with resultType string). + :paramtype wildcard_file_name: any + :keyword prefix: The prefix filter for the Oracle Cloud Storage object name. Type: string (or + Expression with resultType string). + :paramtype prefix: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. 
+ :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + """ super(OracleCloudStorageReadSettings, self).__init__(**kwargs) self.type = 'OracleCloudStorageReadSettings' # type: str self.recursive = kwargs.get('recursive', None) @@ -25324,28 +35136,28 @@ class OracleLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. 
Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -25369,6 +35181,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. 
+ :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. + :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(OracleLinkedService, self).__init__(**kwargs) self.type = 'Oracle' # type: str self.connection_string = kwargs['connection_string'] @@ -25379,19 +35213,19 @@ def __init__( class OraclePartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for Oracle source partitioning. - :param partition_names: Names of the physical partitions of Oracle table. - :type partition_names: any - :param partition_column_name: The name of the column in integer type that will be used for + :ivar partition_names: Names of the physical partitions of Oracle table. + :vartype partition_names: any + :ivar partition_column_name: The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). 
- :type partition_column_name: any - :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + :vartype partition_column_name: any + :ivar partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_upper_bound: any - :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + :vartype partition_upper_bound: any + :ivar partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_lower_bound: any + :vartype partition_lower_bound: any """ _attribute_map = { @@ -25405,6 +35239,21 @@ def __init__( self, **kwargs ): + """ + :keyword partition_names: Names of the physical partitions of Oracle table. + :paramtype partition_names: any + :keyword partition_column_name: The name of the column in integer type that will be used for + proceeding range partitioning. Type: string (or Expression with resultType string). + :paramtype partition_column_name: any + :keyword partition_upper_bound: The maximum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). + :paramtype partition_upper_bound: any + :keyword partition_lower_bound: The minimum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). 
+ :paramtype partition_lower_bound: any + """ super(OraclePartitionSettings, self).__init__(**kwargs) self.partition_names = kwargs.get('partition_names', None) self.partition_column_name = kwargs.get('partition_column_name', None) @@ -25417,41 +35266,41 @@ class OracleServiceCloudLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The URL of the Oracle Service Cloud instance. - :type host: any - :param username: Required. The user name that you use to access Oracle Service Cloud server. - :type username: any - :param password: Required. The password corresponding to the user name that you provided in the + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The URL of the Oracle Service Cloud instance. + :vartype host: any + :ivar username: Required. The user name that you use to access Oracle Service Cloud server. + :vartype username: any + :ivar password: Required. The password corresponding to the user name that you provided in the username key. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -25481,6 +35330,41 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The URL of the Oracle Service Cloud instance. + :paramtype host: any + :keyword username: Required. The user name that you use to access Oracle Service Cloud server. + :paramtype username: any + :keyword password: Required. The password corresponding to the user name that you provided in + the username key. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. Type: boolean (or Expression with resultType boolean). + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. 
The default value is true. Type: boolean (or Expression with resultType + boolean). + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(OracleServiceCloudLinkedService, self).__init__(**kwargs) self.type = 'OracleServiceCloud' # type: str self.host = kwargs['host'] @@ -25497,30 +35381,30 @@ class OracleServiceCloudObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -25545,6 +35429,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. 
+ :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(OracleServiceCloudObjectDataset, self).__init__(**kwargs) self.type = 'OracleServiceCloudObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -25555,32 +35463,32 @@ class OracleServiceCloudSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -25603,6 +35511,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(OracleServiceCloudSource, self).__init__(**kwargs) self.type = 'OracleServiceCloudSource' # type: str self.query = kwargs.get('query', None) @@ -25613,32 +35547,32 @@ class OracleSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. 
- :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param pre_copy_script: SQL pre-copy script. 
Type: string (or Expression with resultType + :vartype disable_metrics_collection: any + :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: any + :vartype pre_copy_script: any """ _validation = { @@ -25661,6 +35595,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). 
+ :paramtype pre_copy_script: any + """ super(OracleSink, self).__init__(**kwargs) self.type = 'OracleSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) @@ -25671,37 +35631,37 @@ class OracleSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any - :param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType + :vartype disable_metrics_collection: any + :ivar oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType string). - :type oracle_reader_query: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype oracle_reader_query: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param partition_option: The partition mechanism that will be used for Oracle read in parallel. + :vartype query_timeout: any + :ivar partition_option: The partition mechanism that will be used for Oracle read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for Oracle source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.OraclePartitionSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for Oracle source partitioning. + :vartype partition_settings: ~azure.mgmt.datafactory.models.OraclePartitionSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -25726,6 +35686,38 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. 
Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType + string). + :paramtype oracle_reader_query: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword partition_option: The partition mechanism that will be used for Oracle read in + parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for Oracle source + partitioning. + :paramtype partition_settings: ~azure.mgmt.datafactory.models.OraclePartitionSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(OracleSource, self).__init__(**kwargs) self.type = 'OracleSource' # type: str self.oracle_reader_query = kwargs.get('oracle_reader_query', None) @@ -25740,37 +35732,37 @@ class OracleTableDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param schema_type_properties_schema: The schema name of the on-premises Oracle database. Type: + :vartype table_name: any + :ivar schema_type_properties_schema: The schema name of the on-premises Oracle database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The table name of the on-premises Oracle database. Type: string (or Expression + :vartype schema_type_properties_schema: any + :ivar table: The table name of the on-premises Oracle database. Type: string (or Expression with resultType string). - :type table: any + :vartype table: any """ _validation = { @@ -25797,6 +35789,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. 
+ :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword schema_type_properties_schema: The schema name of the on-premises Oracle database. + Type: string (or Expression with resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The table name of the on-premises Oracle database. Type: string (or Expression + with resultType string). + :paramtype table: any + """ super(OracleTableDataset, self).__init__(**kwargs) self.type = 'OracleTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -25809,33 +35832,33 @@ class OrcDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. 
+ :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the ORC data storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param orc_compression_codec: The data orcCompressionCodec. Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar location: The location of the ORC data storage. 
+ :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation + :ivar orc_compression_codec: The data orcCompressionCodec. Type: string (or Expression with resultType string). - :type orc_compression_codec: any + :vartype orc_compression_codec: any """ _validation = { @@ -25861,6 +35884,33 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword location: The location of the ORC data storage. + :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation + :keyword orc_compression_codec: The data orcCompressionCodec. Type: string (or Expression with + resultType string). 
+ :paramtype orc_compression_codec: any + """ super(OrcDataset, self).__init__(**kwargs) self.type = 'Orc' # type: str self.location = kwargs.get('location', None) @@ -25872,15 +35922,15 @@ class OrcFormat(DatasetStorageFormat): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage format.Constant filled by server. - :type type: str - :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: any - :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: any + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage format.Constant filled by server. + :vartype type: str + :ivar serializer: Serializer. Type: string (or Expression with resultType string). + :vartype serializer: any + :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). + :vartype deserializer: any """ _validation = { @@ -25898,6 +35948,15 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword serializer: Serializer. Type: string (or Expression with resultType string). + :paramtype serializer: any + :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). + :paramtype deserializer: any + """ super(OrcFormat, self).__init__(**kwargs) self.type = 'OrcFormat' # type: str @@ -25907,33 +35966,33 @@ class OrcSink(CopySink): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: ORC store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: ORC format settings. - :type format_settings: ~azure.mgmt.datafactory.models.OrcWriteSettings + :vartype disable_metrics_collection: any + :ivar store_settings: ORC store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :ivar format_settings: ORC format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.OrcWriteSettings """ _validation = { @@ -25957,6 +36016,33 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. 
Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: ORC store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :keyword format_settings: ORC format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.OrcWriteSettings + """ super(OrcSink, self).__init__(**kwargs) self.type = 'OrcSink' # type: str self.store_settings = kwargs.get('store_settings', None) @@ -25968,28 +36054,28 @@ class OrcSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: ORC store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar store_settings: ORC store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -26011,6 +36097,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: ORC store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(OrcSource, self).__init__(**kwargs) self.type = 'OrcSource' # type: str self.store_settings = kwargs.get('store_settings', None) @@ -26022,18 +36130,18 @@ class OrcWriteSettings(FormatWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). 
- :type max_rows_per_file: any - :param file_name_prefix: Specifies the file name pattern + :vartype max_rows_per_file: any + :ivar file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). - :type file_name_prefix: any + :vartype file_name_prefix: any """ _validation = { @@ -26051,6 +36159,18 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_rows_per_file: Limit the written file's row count to be smaller than or equal to + the specified count. Type: integer (or Expression with resultType integer). + :paramtype max_rows_per_file: any + :keyword file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :paramtype file_name_prefix: any + """ super(OrcWriteSettings, self).__init__(**kwargs) self.type = 'OrcWriteSettings' # type: str self.max_rows_per_file = kwargs.get('max_rows_per_file', None) @@ -26062,10 +36182,10 @@ class PackageStore(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param name: Required. The name of the package store. - :type name: str - :param package_store_linked_service: Required. The package store linked service reference. - :type package_store_linked_service: ~azure.mgmt.datafactory.models.EntityReference + :ivar name: Required. The name of the package store. + :vartype name: str + :ivar package_store_linked_service: Required. The package store linked service reference. 
+ :vartype package_store_linked_service: ~azure.mgmt.datafactory.models.EntityReference """ _validation = { @@ -26082,6 +36202,12 @@ def __init__( self, **kwargs ): + """ + :keyword name: Required. The name of the package store. + :paramtype name: str + :keyword package_store_linked_service: Required. The package store linked service reference. + :paramtype package_store_linked_service: ~azure.mgmt.datafactory.models.EntityReference + """ super(PackageStore, self).__init__(**kwargs) self.name = kwargs['name'] self.package_store_linked_service = kwargs['package_store_linked_service'] @@ -26092,11 +36218,11 @@ class ParameterSpecification(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Parameter type. Possible values include: "Object", "String", "Int", + :ivar type: Required. Parameter type. Possible values include: "Object", "String", "Int", "Float", "Bool", "Array", "SecureString". - :type type: str or ~azure.mgmt.datafactory.models.ParameterType - :param default_value: Default value of parameter. - :type default_value: any + :vartype type: str or ~azure.mgmt.datafactory.models.ParameterType + :ivar default_value: Default value of parameter. + :vartype default_value: any """ _validation = { @@ -26112,6 +36238,13 @@ def __init__( self, **kwargs ): + """ + :keyword type: Required. Parameter type. Possible values include: "Object", "String", "Int", + "Float", "Bool", "Array", "SecureString". + :paramtype type: str or ~azure.mgmt.datafactory.models.ParameterType + :keyword default_value: Default value of parameter. + :paramtype default_value: any + """ super(ParameterSpecification, self).__init__(**kwargs) self.type = kwargs['type'] self.default_value = kwargs.get('default_value', None) @@ -26122,33 +36255,33 @@ class ParquetDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the parquet storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param compression_codec: The data compressionCodec. Type: string (or Expression with - resultType string). - :type compression_codec: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar location: The location of the parquet storage. + :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation + :ivar compression_codec: The data compressionCodec. Type: string (or Expression with resultType + string). + :vartype compression_codec: any """ _validation = { @@ -26174,6 +36307,33 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword location: The location of the parquet storage. + :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation + :keyword compression_codec: The data compressionCodec. Type: string (or Expression with + resultType string). + :paramtype compression_codec: any + """ super(ParquetDataset, self).__init__(**kwargs) self.type = 'Parquet' # type: str self.location = kwargs.get('location', None) @@ -26185,15 +36345,15 @@ class ParquetFormat(DatasetStorageFormat): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage format.Constant filled by server. - :type type: str - :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: any - :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: any + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage format.Constant filled by server. + :vartype type: str + :ivar serializer: Serializer. Type: string (or Expression with resultType string). + :vartype serializer: any + :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). 
+ :vartype deserializer: any """ _validation = { @@ -26211,6 +36371,15 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword serializer: Serializer. Type: string (or Expression with resultType string). + :paramtype serializer: any + :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). + :paramtype deserializer: any + """ super(ParquetFormat, self).__init__(**kwargs) self.type = 'ParquetFormat' # type: str @@ -26220,33 +36389,33 @@ class ParquetSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). 
- :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Parquet store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: Parquet format settings. - :type format_settings: ~azure.mgmt.datafactory.models.ParquetWriteSettings + :vartype disable_metrics_collection: any + :ivar store_settings: Parquet store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :ivar format_settings: Parquet format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.ParquetWriteSettings """ _validation = { @@ -26270,6 +36439,33 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. 
+ :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: Parquet store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :keyword format_settings: Parquet format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.ParquetWriteSettings + """ super(ParquetSink, self).__init__(**kwargs) self.type = 'ParquetSink' # type: str self.store_settings = kwargs.get('store_settings', None) @@ -26281,28 +36477,28 @@ class ParquetSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. 
Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Parquet store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar store_settings: Parquet store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -26324,6 +36520,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: Parquet store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(ParquetSource, self).__init__(**kwargs) self.type = 'ParquetSource' # type: str self.store_settings = kwargs.get('store_settings', None) @@ -26335,18 +36553,18 @@ class ParquetWriteSettings(FormatWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. 
The write setting type.Constant filled by server. - :type type: str - :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :type max_rows_per_file: any - :param file_name_prefix: Specifies the file name pattern + :vartype max_rows_per_file: any + :ivar file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). - :type file_name_prefix: any + :vartype file_name_prefix: any """ _validation = { @@ -26364,6 +36582,18 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_rows_per_file: Limit the written file's row count to be smaller than or equal to + the specified count. Type: integer (or Expression with resultType integer). + :paramtype max_rows_per_file: any + :keyword file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :paramtype file_name_prefix: any + """ super(ParquetWriteSettings, self).__init__(**kwargs) self.type = 'ParquetWriteSettings' # type: str self.max_rows_per_file = kwargs.get('max_rows_per_file', None) @@ -26375,39 +36605,39 @@ class PaypalLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). - :type host: any - :param client_id: Required. The client ID associated with your PayPal application. - :type client_id: any - :param client_secret: The client secret associated with your PayPal application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). + :vartype host: any + :ivar client_id: Required. The client ID associated with your PayPal application. 
+ :vartype client_id: any + :ivar client_secret: The client secret associated with your PayPal application. + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -26436,6 +36666,39 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). + :paramtype host: any + :keyword client_id: Required. The client ID associated with your PayPal application. + :paramtype client_id: any + :keyword client_secret: The client secret associated with your PayPal application. + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(PaypalLinkedService, self).__init__(**kwargs) self.type = 'Paypal' # type: str self.host = kwargs['host'] @@ -26452,30 +36715,30 @@ class PaypalObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. 
Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -26500,6 +36763,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). 
+ :paramtype table_name: any + """ super(PaypalObjectDataset, self).__init__(**kwargs) self.type = 'PaypalObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -26510,32 +36797,32 @@ class PaypalSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -26558,6 +36845,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(PaypalSource, self).__init__(**kwargs) self.type = 'PaypalSource' # type: str self.query = kwargs.get('query', None) @@ -26568,57 +36881,57 @@ class PhoenixLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The IP address or host name of the Phoenix server. (i.e. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. 
+ :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The IP address or host name of the Phoenix server. (i.e. 192.168.222.160). - :type host: any - :param port: The TCP port that the Phoenix server uses to listen for client connections. The + :vartype host: any + :ivar port: The TCP port that the Phoenix server uses to listen for client connections. The default value is 8765. - :type port: any - :param http_path: The partial URL corresponding to the Phoenix server. (i.e. + :vartype port: any + :ivar http_path: The partial URL corresponding to the Phoenix server. (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using WindowsAzureHDInsightService. - :type http_path: any - :param authentication_type: Required. The authentication mechanism used to connect to the + :vartype http_path: any + :ivar authentication_type: Required. The authentication mechanism used to connect to the Phoenix server. Possible values include: "Anonymous", "UsernameAndPassword", "WindowsAzureHDInsightService". - :type authentication_type: str or ~azure.mgmt.datafactory.models.PhoenixAuthenticationType - :param username: The user name used to connect to the Phoenix server. - :type username: any - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.PhoenixAuthenticationType + :ivar username: The user name used to connect to the Phoenix server. 
+ :vartype username: any + :ivar password: The password corresponding to the user name. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :type enable_ssl: any - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + :vartype enable_ssl: any + :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: any - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + :vartype trusted_cert_path: any + :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: any - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + :vartype use_system_trust_store: any + :ivar allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: any - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + :vartype allow_host_name_cn_mismatch: any + :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype allow_self_signed_server_cert: any + :ivar encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -26652,6 +36965,57 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The IP address or host name of the Phoenix server. (i.e. + 192.168.222.160). + :paramtype host: any + :keyword port: The TCP port that the Phoenix server uses to listen for client connections. The + default value is 8765. + :paramtype port: any + :keyword http_path: The partial URL corresponding to the Phoenix server. (i.e. + /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using + WindowsAzureHDInsightService. + :paramtype http_path: any + :keyword authentication_type: Required. The authentication mechanism used to connect to the + Phoenix server. Possible values include: "Anonymous", "UsernameAndPassword", + "WindowsAzureHDInsightService". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.PhoenixAuthenticationType + :keyword username: The user name used to connect to the Phoenix server. + :paramtype username: any + :keyword password: The password corresponding to the user name. 
+ :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. + The default value is false. + :paramtype enable_ssl: any + :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates + for verifying the server when connecting over SSL. This property can only be set when using SSL + on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :paramtype trusted_cert_path: any + :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system + trust store or from a specified PEM file. The default value is false. + :paramtype use_system_trust_store: any + :keyword allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :paramtype allow_host_name_cn_mismatch: any + :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates + from the server. The default value is false. + :paramtype allow_self_signed_server_cert: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(PhoenixLinkedService, self).__init__(**kwargs) self.type = 'Phoenix' # type: str self.host = kwargs['host'] @@ -26673,37 +37037,37 @@ class PhoenixObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. 
- :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. 
Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The table name of the Phoenix. Type: string (or Expression with resultType + :vartype table_name: any + :ivar table: The table name of the Phoenix. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The schema name of the Phoenix. Type: string (or + :vartype table: any + :ivar schema_type_properties_schema: The schema name of the Phoenix. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -26730,6 +37094,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of the Phoenix. Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of the Phoenix. Type: string (or + Expression with resultType string). + :paramtype schema_type_properties_schema: any + """ super(PhoenixObjectDataset, self).__init__(**kwargs) self.type = 'PhoenixObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -26742,32 +37137,32 @@ class PhoenixSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -26790,6 +37185,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(PhoenixSource, self).__init__(**kwargs) self.type = 'PhoenixSource' # type: str self.query = kwargs.get('query', None) @@ -26798,8 +37219,8 @@ def __init__( class PipelineElapsedTimeMetricPolicy(msrest.serialization.Model): """Pipeline ElapsedTime Metric Policy. - :param duration: TimeSpan value, after which an Azure Monitoring Metric is fired. - :type duration: any + :ivar duration: TimeSpan value, after which an Azure Monitoring Metric is fired. + :vartype duration: any """ _attribute_map = { @@ -26810,6 +37231,10 @@ def __init__( self, **kwargs ): + """ + :keyword duration: TimeSpan value, after which an Azure Monitoring Metric is fired. 
+ :paramtype duration: any + """ super(PipelineElapsedTimeMetricPolicy, self).__init__(**kwargs) self.duration = kwargs.get('duration', None) @@ -26817,8 +37242,8 @@ def __init__( class PipelineFolder(msrest.serialization.Model): """The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. - :param name: The name of the folder that this Pipeline is in. - :type name: str + :ivar name: The name of the folder that this Pipeline is in. + :vartype name: str """ _attribute_map = { @@ -26829,6 +37254,10 @@ def __init__( self, **kwargs ): + """ + :keyword name: The name of the folder that this Pipeline is in. + :paramtype name: str + """ super(PipelineFolder, self).__init__(**kwargs) self.name = kwargs.get('name', None) @@ -26838,10 +37267,10 @@ class PipelineListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of pipelines. - :type value: list[~azure.mgmt.datafactory.models.PipelineResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of pipelines. + :vartype value: list[~azure.mgmt.datafactory.models.PipelineResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -26857,6 +37286,12 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. List of pipelines. + :paramtype value: list[~azure.mgmt.datafactory.models.PipelineResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(PipelineListResponse, self).__init__(**kwargs) self.value = kwargs['value'] self.next_link = kwargs.get('next_link', None) @@ -26865,8 +37300,8 @@ def __init__( class PipelinePolicy(msrest.serialization.Model): """Pipeline Policy. 
- :param elapsed_time_metric: Pipeline ElapsedTime Metric Policy. - :type elapsed_time_metric: ~azure.mgmt.datafactory.models.PipelineElapsedTimeMetricPolicy + :ivar elapsed_time_metric: Pipeline ElapsedTime Metric Policy. + :vartype elapsed_time_metric: ~azure.mgmt.datafactory.models.PipelineElapsedTimeMetricPolicy """ _attribute_map = { @@ -26877,6 +37312,10 @@ def __init__( self, **kwargs ): + """ + :keyword elapsed_time_metric: Pipeline ElapsedTime Metric Policy. + :paramtype elapsed_time_metric: ~azure.mgmt.datafactory.models.PipelineElapsedTimeMetricPolicy + """ super(PipelinePolicy, self).__init__(**kwargs) self.elapsed_time_metric = kwargs.get('elapsed_time_metric', None) @@ -26890,10 +37329,10 @@ class PipelineReference(msrest.serialization.Model): :ivar type: Pipeline reference type. Has constant value: "PipelineReference". :vartype type: str - :param reference_name: Required. Reference pipeline name. - :type reference_name: str - :param name: Reference name. - :type name: str + :ivar reference_name: Required. Reference pipeline name. + :vartype reference_name: str + :ivar name: Reference name. + :vartype name: str """ _validation = { @@ -26913,6 +37352,12 @@ def __init__( self, **kwargs ): + """ + :keyword reference_name: Required. Reference pipeline name. + :paramtype reference_name: str + :keyword name: Reference name. + :paramtype name: str + """ super(PipelineReference, self).__init__(**kwargs) self.reference_name = kwargs['reference_name'] self.name = kwargs.get('name', None) @@ -26931,28 +37376,28 @@ class PipelineResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param description: The description of the pipeline. - :type description: str - :param activities: List of activities in pipeline. 
- :type activities: list[~azure.mgmt.datafactory.models.Activity] - :param parameters: List of parameters for pipeline. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param variables: List of variables for pipeline. - :type variables: dict[str, ~azure.mgmt.datafactory.models.VariableSpecification] - :param concurrency: The max number of concurrent runs for the pipeline. - :type concurrency: int - :param annotations: List of tags that can be used for describing the Pipeline. - :type annotations: list[any] - :param run_dimensions: Dimensions emitted by Pipeline. - :type run_dimensions: dict[str, any] - :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar description: The description of the pipeline. + :vartype description: str + :ivar activities: List of activities in pipeline. + :vartype activities: list[~azure.mgmt.datafactory.models.Activity] + :ivar parameters: List of parameters for pipeline. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar variables: List of variables for pipeline. + :vartype variables: dict[str, ~azure.mgmt.datafactory.models.VariableSpecification] + :ivar concurrency: The max number of concurrent runs for the pipeline. + :vartype concurrency: int + :ivar annotations: List of tags that can be used for describing the Pipeline. + :vartype annotations: list[any] + :ivar run_dimensions: Dimensions emitted by Pipeline. + :vartype run_dimensions: dict[str, any] + :ivar folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.PipelineFolder - :param policy: Pipeline Policy. 
- :type policy: ~azure.mgmt.datafactory.models.PipelinePolicy + :vartype folder: ~azure.mgmt.datafactory.models.PipelineFolder + :ivar policy: Pipeline Policy. + :vartype policy: ~azure.mgmt.datafactory.models.PipelinePolicy """ _validation = { @@ -26984,6 +37429,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: The description of the pipeline. + :paramtype description: str + :keyword activities: List of activities in pipeline. + :paramtype activities: list[~azure.mgmt.datafactory.models.Activity] + :keyword parameters: List of parameters for pipeline. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword variables: List of variables for pipeline. + :paramtype variables: dict[str, ~azure.mgmt.datafactory.models.VariableSpecification] + :keyword concurrency: The max number of concurrent runs for the pipeline. + :paramtype concurrency: int + :keyword annotations: List of tags that can be used for describing the Pipeline. + :paramtype annotations: list[any] + :keyword run_dimensions: Dimensions emitted by Pipeline. + :paramtype run_dimensions: dict[str, any] + :keyword folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.PipelineFolder + :keyword policy: Pipeline Policy. + :paramtype policy: ~azure.mgmt.datafactory.models.PipelinePolicy + """ super(PipelineResource, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.description = kwargs.get('description', None) @@ -27002,9 +37471,9 @@ class PipelineRun(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar run_id: Identifier of a run. :vartype run_id: str :ivar run_group_id: Identifier that correlates all the recovery runs of a pipeline run. @@ -27072,6 +37541,11 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(PipelineRun, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.run_id = None @@ -27126,6 +37600,8 @@ def __init__( self, **kwargs ): + """ + """ super(PipelineRunInvokedBy, self).__init__(**kwargs) self.name = None self.id = None @@ -27139,11 +37615,11 @@ class PipelineRunsQueryResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of pipeline runs. - :type value: list[~azure.mgmt.datafactory.models.PipelineRun] - :param continuation_token: The continuation token for getting the next page of results, if any + :ivar value: Required. List of pipeline runs. + :vartype value: list[~azure.mgmt.datafactory.models.PipelineRun] + :ivar continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. - :type continuation_token: str + :vartype continuation_token: str """ _validation = { @@ -27159,6 +37635,13 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. List of pipeline runs. + :paramtype value: list[~azure.mgmt.datafactory.models.PipelineRun] + :keyword continuation_token: The continuation token for getting the next page of results, if + any remaining results exist, null otherwise. 
+ :paramtype continuation_token: str + """ super(PipelineRunsQueryResponse, self).__init__(**kwargs) self.value = kwargs['value'] self.continuation_token = kwargs.get('continuation_token', None) @@ -27167,22 +37650,22 @@ def __init__( class PolybaseSettings(msrest.serialization.Model): """PolyBase settings. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param reject_type: Reject type. Possible values include: "value", "percentage". - :type reject_type: str or ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType - :param reject_value: Specifies the value or the percentage of rows that can be rejected before + :vartype additional_properties: dict[str, any] + :ivar reject_type: Reject type. Possible values include: "value", "percentage". + :vartype reject_type: str or ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType + :ivar reject_value: Specifies the value or the percentage of rows that can be rejected before the query fails. Type: number (or Expression with resultType number), minimum: 0. - :type reject_value: any - :param reject_sample_value: Determines the number of rows to attempt to retrieve before the + :vartype reject_value: any + :ivar reject_sample_value: Determines the number of rows to attempt to retrieve before the PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with resultType integer), minimum: 0. - :type reject_sample_value: any - :param use_type_default: Specifies how to handle missing values in delimited text files when + :vartype reject_sample_value: any + :ivar use_type_default: Specifies how to handle missing values in delimited text files when PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType boolean). 
- :type use_type_default: any + :vartype use_type_default: any """ _attribute_map = { @@ -27197,6 +37680,24 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword reject_type: Reject type. Possible values include: "value", "percentage". + :paramtype reject_type: str or ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType + :keyword reject_value: Specifies the value or the percentage of rows that can be rejected + before the query fails. Type: number (or Expression with resultType number), minimum: 0. + :paramtype reject_value: any + :keyword reject_sample_value: Determines the number of rows to attempt to retrieve before the + PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with + resultType integer), minimum: 0. + :paramtype reject_sample_value: any + :keyword use_type_default: Specifies how to handle missing values in delimited text files when + PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType + boolean). + :paramtype use_type_default: any + """ super(PolybaseSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.reject_type = kwargs.get('reject_type', None) @@ -27210,27 +37711,27 @@ class PostgreSqlLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. - :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -27254,6 +37755,27 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. + :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(PostgreSqlLinkedService, self).__init__(**kwargs) self.type = 'PostgreSql' # type: str self.connection_string = kwargs['connection_string'] @@ -27266,31 +37788,31 @@ class PostgreSqlSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. 
- :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any + :vartype additional_columns: any + :ivar query: Database query. Type: string (or Expression with resultType string). + :vartype query: any """ _validation = { @@ -27313,6 +37835,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Type: string (or Expression with resultType string). 
+ :paramtype query: any + """ super(PostgreSqlSource, self).__init__(**kwargs) self.type = 'PostgreSqlSource' # type: str self.query = kwargs.get('query', None) @@ -27323,36 +37870,36 @@ class PostgreSqlTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The PostgreSQL table name. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The PostgreSQL schema name. Type: string (or Expression + :vartype table_name: any + :ivar table: The PostgreSQL table name. Type: string (or Expression with resultType string). + :vartype table: any + :ivar schema_type_properties_schema: The PostgreSQL schema name. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -27379,6 +37926,36 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. 
+ :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The PostgreSQL table name. Type: string (or Expression with resultType string). + :paramtype table: any + :keyword schema_type_properties_schema: The PostgreSQL schema name. Type: string (or Expression + with resultType string). + :paramtype schema_type_properties_schema: any + """ super(PostgreSqlTableDataset, self).__init__(**kwargs) self.type = 'PostgreSqlTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -27391,20 +37968,20 @@ class PowerQuerySink(DataFlowSink): All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param linked_service: Linked service reference. - :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param flowlet: Flowlet Reference. 
- :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference - :param schema_linked_service: Schema linked service reference. - :type schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param script: sink script. - :type script: str + :ivar name: Required. Transformation name. + :vartype name: str + :ivar description: Transformation description. + :vartype description: str + :ivar dataset: Dataset reference. + :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :ivar linked_service: Linked service reference. + :vartype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar flowlet: Flowlet Reference. + :vartype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + :ivar schema_linked_service: Schema linked service reference. + :vartype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar script: sink script. + :vartype script: str """ _validation = { @@ -27425,6 +38002,22 @@ def __init__( self, **kwargs ): + """ + :keyword name: Required. Transformation name. + :paramtype name: str + :keyword description: Transformation description. + :paramtype description: str + :keyword dataset: Dataset reference. + :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :keyword linked_service: Linked service reference. + :paramtype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword flowlet: Flowlet Reference. + :paramtype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + :keyword schema_linked_service: Schema linked service reference. + :paramtype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword script: sink script. 
+ :paramtype script: str + """ super(PowerQuerySink, self).__init__(**kwargs) self.script = kwargs.get('script', None) @@ -27432,10 +38025,10 @@ def __init__( class PowerQuerySinkMapping(msrest.serialization.Model): """Map Power Query mashup query to sink dataset(s). - :param query_name: Name of the query in Power Query mashup document. - :type query_name: str - :param dataflow_sinks: List of sinks mapped to Power Query mashup query. - :type dataflow_sinks: list[~azure.mgmt.datafactory.models.PowerQuerySink] + :ivar query_name: Name of the query in Power Query mashup document. + :vartype query_name: str + :ivar dataflow_sinks: List of sinks mapped to Power Query mashup query. + :vartype dataflow_sinks: list[~azure.mgmt.datafactory.models.PowerQuerySink] """ _attribute_map = { @@ -27447,6 +38040,12 @@ def __init__( self, **kwargs ): + """ + :keyword query_name: Name of the query in Power Query mashup document. + :paramtype query_name: str + :keyword dataflow_sinks: List of sinks mapped to Power Query mashup query. + :paramtype dataflow_sinks: list[~azure.mgmt.datafactory.models.PowerQuerySink] + """ super(PowerQuerySinkMapping, self).__init__(**kwargs) self.query_name = kwargs.get('query_name', None) self.dataflow_sinks = kwargs.get('dataflow_sinks', None) @@ -27457,20 +38056,20 @@ class PowerQuerySource(DataFlowSource): All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param linked_service: Linked service reference. - :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param flowlet: Flowlet Reference. - :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference - :param schema_linked_service: Schema linked service reference. 
- :type schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param script: source script. - :type script: str + :ivar name: Required. Transformation name. + :vartype name: str + :ivar description: Transformation description. + :vartype description: str + :ivar dataset: Dataset reference. + :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :ivar linked_service: Linked service reference. + :vartype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar flowlet: Flowlet Reference. + :vartype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + :ivar schema_linked_service: Schema linked service reference. + :vartype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar script: source script. + :vartype script: str """ _validation = { @@ -27491,6 +38090,22 @@ def __init__( self, **kwargs ): + """ + :keyword name: Required. Transformation name. + :paramtype name: str + :keyword description: Transformation description. + :paramtype description: str + :keyword dataset: Dataset reference. + :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :keyword linked_service: Linked service reference. + :paramtype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword flowlet: Flowlet Reference. + :paramtype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + :keyword schema_linked_service: Schema linked service reference. + :paramtype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword script: source script. + :paramtype script: str + """ super(PowerQuerySource, self).__init__(**kwargs) self.script = kwargs.get('script', None) @@ -27500,59 +38115,58 @@ class PrestoLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The IP address or host name of the Presto server. (i.e. - 192.168.222.160). - :type host: any - :param server_version: Required. The version of the Presto server. (i.e. 0.148-t). - :type server_version: any - :param catalog: Required. The catalog context for all request against the server. - :type catalog: any - :param port: The TCP port that the Presto server uses to listen for client connections. The + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The IP address or host name of the Presto server. (i.e. 192.168.222.160). 
+ :vartype host: any + :ivar server_version: Required. The version of the Presto server. (i.e. 0.148-t). + :vartype server_version: any + :ivar catalog: Required. The catalog context for all request against the server. + :vartype catalog: any + :ivar port: The TCP port that the Presto server uses to listen for client connections. The default value is 8080. - :type port: any - :param authentication_type: Required. The authentication mechanism used to connect to the - Presto server. Possible values include: "Anonymous", "LDAP". - :type authentication_type: str or ~azure.mgmt.datafactory.models.PrestoAuthenticationType - :param username: The user name used to connect to the Presto server. - :type username: any - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + :vartype port: any + :ivar authentication_type: Required. The authentication mechanism used to connect to the Presto + server. Possible values include: "Anonymous", "LDAP". + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.PrestoAuthenticationType + :ivar username: The user name used to connect to the Presto server. + :vartype username: any + :ivar password: The password corresponding to the user name. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :type enable_ssl: any - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + :vartype enable_ssl: any + :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. 
- :type trusted_cert_path: any - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + :vartype trusted_cert_path: any + :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: any - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + :vartype use_system_trust_store: any + :ivar allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: any - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + :vartype allow_host_name_cn_mismatch: any + :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: any - :param time_zone_id: The local time zone used by the connection. Valid values for this option + :vartype allow_self_signed_server_cert: any + :ivar time_zone_id: The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone Database. The default value is the system time zone. - :type time_zone_id: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype time_zone_id: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -27590,6 +38204,59 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The IP address or host name of the Presto server. (i.e. + 192.168.222.160). + :paramtype host: any + :keyword server_version: Required. The version of the Presto server. (i.e. 0.148-t). + :paramtype server_version: any + :keyword catalog: Required. The catalog context for all request against the server. + :paramtype catalog: any + :keyword port: The TCP port that the Presto server uses to listen for client connections. The + default value is 8080. + :paramtype port: any + :keyword authentication_type: Required. The authentication mechanism used to connect to the + Presto server. Possible values include: "Anonymous", "LDAP". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.PrestoAuthenticationType + :keyword username: The user name used to connect to the Presto server. + :paramtype username: any + :keyword password: The password corresponding to the user name. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. + The default value is false. + :paramtype enable_ssl: any + :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates + for verifying the server when connecting over SSL. This property can only be set when using SSL + on self-hosted IR. The default value is the cacerts.pem file installed with the IR. 
+ :paramtype trusted_cert_path: any + :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system + trust store or from a specified PEM file. The default value is false. + :paramtype use_system_trust_store: any + :keyword allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :paramtype allow_host_name_cn_mismatch: any + :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates + from the server. The default value is false. + :paramtype allow_self_signed_server_cert: any + :keyword time_zone_id: The local time zone used by the connection. Valid values for this option + are specified in the IANA Time Zone Database. The default value is the system time zone. + :paramtype time_zone_id: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(PrestoLinkedService, self).__init__(**kwargs) self.type = 'Presto' # type: str self.host = kwargs['host'] @@ -27613,37 +38280,36 @@ class PrestoObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. 
Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. 
- :type table_name: any - :param table: The table name of the Presto. Type: string (or Expression with resultType - string). - :type table: any - :param schema_type_properties_schema: The schema name of the Presto. Type: string (or - Expression with resultType string). - :type schema_type_properties_schema: any + :vartype table_name: any + :ivar table: The table name of the Presto. Type: string (or Expression with resultType string). + :vartype table: any + :ivar schema_type_properties_schema: The schema name of the Presto. Type: string (or Expression + with resultType string). + :vartype schema_type_properties_schema: any """ _validation = { @@ -27670,6 +38336,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. 
Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of the Presto. Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of the Presto. Type: string (or + Expression with resultType string). + :paramtype schema_type_properties_schema: any + """ super(PrestoObjectDataset, self).__init__(**kwargs) self.type = 'PrestoObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -27682,32 +38379,32 @@ class PrestoSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -27730,6 +38427,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(PrestoSource, self).__init__(**kwargs) self.type = 'PrestoSource' # type: str self.query = kwargs.get('query', None) @@ -27740,10 +38463,10 @@ class PrivateEndpointConnectionListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of Private Endpoint Connections. - :type value: list[~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of Private Endpoint Connections. + :vartype value: list[~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -27759,6 +38482,12 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. List of Private Endpoint Connections. 
+ :paramtype value: list[~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(PrivateEndpointConnectionListResponse, self).__init__(**kwargs) self.value = kwargs['value'] self.next_link = kwargs.get('next_link', None) @@ -27777,8 +38506,8 @@ class PrivateEndpointConnectionResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Core resource properties. - :type properties: ~azure.mgmt.datafactory.models.RemotePrivateEndpointConnection + :ivar properties: Core resource properties. + :vartype properties: ~azure.mgmt.datafactory.models.RemotePrivateEndpointConnection """ _validation = { @@ -27800,6 +38529,10 @@ def __init__( self, **kwargs ): + """ + :keyword properties: Core resource properties. + :paramtype properties: ~azure.mgmt.datafactory.models.RemotePrivateEndpointConnection + """ super(PrivateEndpointConnectionResource, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) @@ -27807,8 +38540,8 @@ def __init__( class PrivateLinkConnectionApprovalRequest(msrest.serialization.Model): """A request to approve or reject a private endpoint connection. - :param private_link_service_connection_state: The state of a private link connection. - :type private_link_service_connection_state: + :ivar private_link_service_connection_state: The state of a private link connection. + :vartype private_link_service_connection_state: ~azure.mgmt.datafactory.models.PrivateLinkConnectionState """ @@ -27820,6 +38553,11 @@ def __init__( self, **kwargs ): + """ + :keyword private_link_service_connection_state: The state of a private link connection. 
+ :paramtype private_link_service_connection_state: + ~azure.mgmt.datafactory.models.PrivateLinkConnectionState + """ super(PrivateLinkConnectionApprovalRequest, self).__init__(**kwargs) self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) @@ -27837,8 +38575,8 @@ class PrivateLinkConnectionApprovalRequestResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Core resource properties. - :type properties: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequest + :ivar properties: Core resource properties. + :vartype properties: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequest """ _validation = { @@ -27860,6 +38598,10 @@ def __init__( self, **kwargs ): + """ + :keyword properties: Core resource properties. + :paramtype properties: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequest + """ super(PrivateLinkConnectionApprovalRequestResource, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) @@ -27867,12 +38609,12 @@ def __init__( class PrivateLinkConnectionState(msrest.serialization.Model): """The state of a private link connection. - :param status: Status of a private link connection. - :type status: str - :param description: Description of a private link connection. - :type description: str - :param actions_required: ActionsRequired for a private link connection. - :type actions_required: str + :ivar status: Status of a private link connection. + :vartype status: str + :ivar description: Description of a private link connection. + :vartype description: str + :ivar actions_required: ActionsRequired for a private link connection. + :vartype actions_required: str """ _attribute_map = { @@ -27885,6 +38627,14 @@ def __init__( self, **kwargs ): + """ + :keyword status: Status of a private link connection. 
+ :paramtype status: str + :keyword description: Description of a private link connection. + :paramtype description: str + :keyword actions_required: ActionsRequired for a private link connection. + :paramtype actions_required: str + """ super(PrivateLinkConnectionState, self).__init__(**kwargs) self.status = kwargs.get('status', None) self.description = kwargs.get('description', None) @@ -27904,8 +38654,8 @@ class PrivateLinkResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Core resource properties. - :type properties: ~azure.mgmt.datafactory.models.PrivateLinkResourceProperties + :ivar properties: Core resource properties. + :vartype properties: ~azure.mgmt.datafactory.models.PrivateLinkResourceProperties """ _validation = { @@ -27927,6 +38677,10 @@ def __init__( self, **kwargs ): + """ + :keyword properties: Core resource properties. + :paramtype properties: ~azure.mgmt.datafactory.models.PrivateLinkResourceProperties + """ super(PrivateLinkResource, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) @@ -27960,6 +38714,8 @@ def __init__( self, **kwargs ): + """ + """ super(PrivateLinkResourceProperties, self).__init__(**kwargs) self.group_id = None self.required_members = None @@ -27971,8 +38727,8 @@ class PrivateLinkResourcesWrapper(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. - :type value: list[~azure.mgmt.datafactory.models.PrivateLinkResource] + :ivar value: Required. + :vartype value: list[~azure.mgmt.datafactory.models.PrivateLinkResource] """ _validation = { @@ -27987,6 +38743,10 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. 
+ :paramtype value: list[~azure.mgmt.datafactory.models.PrivateLinkResource] + """ super(PrivateLinkResourcesWrapper, self).__init__(**kwargs) self.value = kwargs['value'] @@ -27994,10 +38754,10 @@ def __init__( class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model): """A list of active debug sessions. - :param value: Array with all active debug sessions. - :type value: list[~azure.mgmt.datafactory.models.DataFlowDebugSessionInfo] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Array with all active debug sessions. + :vartype value: list[~azure.mgmt.datafactory.models.DataFlowDebugSessionInfo] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _attribute_map = { @@ -28009,6 +38769,12 @@ def __init__( self, **kwargs ): + """ + :keyword value: Array with all active debug sessions. + :paramtype value: list[~azure.mgmt.datafactory.models.DataFlowDebugSessionInfo] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(QueryDataFlowDebugSessionsResponse, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = kwargs.get('next_link', None) @@ -28019,41 +38785,41 @@ class QuickBooksLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_properties: Properties used to connect to QuickBooks. It is mutually - exclusive with any other properties in the linked service. Type: object. - :type connection_properties: any - :param endpoint: The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com). - :type endpoint: any - :param company_id: The company ID of the QuickBooks company to authorize. - :type company_id: any - :param consumer_key: The consumer key for OAuth 1.0 authentication. - :type consumer_key: any - :param consumer_secret: The consumer secret for OAuth 1.0 authentication. - :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase - :param access_token: The access token for OAuth 1.0 authentication. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_token_secret: The access token secret for OAuth 1.0 authentication. - :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. 
+ :vartype annotations: list[any] + :ivar connection_properties: Properties used to connect to QuickBooks. It is mutually exclusive + with any other properties in the linked service. Type: object. + :vartype connection_properties: any + :ivar endpoint: The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com). + :vartype endpoint: any + :ivar company_id: The company ID of the QuickBooks company to authorize. + :vartype company_id: any + :ivar consumer_key: The consumer key for OAuth 1.0 authentication. + :vartype consumer_key: any + :ivar consumer_secret: The consumer secret for OAuth 1.0 authentication. + :vartype consumer_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar access_token: The access token for OAuth 1.0 authentication. + :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar access_token_secret: The access token secret for OAuth 1.0 authentication. + :vartype access_token_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_encrypted_endpoints: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -28082,6 +38848,41 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_properties: Properties used to connect to QuickBooks. It is mutually + exclusive with any other properties in the linked service. Type: object. + :paramtype connection_properties: any + :keyword endpoint: The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com). + :paramtype endpoint: any + :keyword company_id: The company ID of the QuickBooks company to authorize. + :paramtype company_id: any + :keyword consumer_key: The consumer key for OAuth 1.0 authentication. + :paramtype consumer_key: any + :keyword consumer_secret: The consumer secret for OAuth 1.0 authentication. + :paramtype consumer_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword access_token: The access token for OAuth 1.0 authentication. + :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword access_token_secret: The access token secret for OAuth 1.0 authentication. + :paramtype access_token_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(QuickBooksLinkedService, self).__init__(**kwargs) self.type = 'QuickBooks' # type: str self.connection_properties = kwargs.get('connection_properties', None) @@ -28100,30 +38901,30 @@ class QuickBooksObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -28148,6 +38949,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. 
+ :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(QuickBooksObjectDataset, self).__init__(**kwargs) self.type = 'QuickBooksObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -28158,32 +38983,32 @@ class QuickBooksSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -28206,6 +39031,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(QuickBooksSource, self).__init__(**kwargs) self.type = 'QuickBooksSource' # type: str self.query = kwargs.get('query', None) @@ -28214,19 +39065,19 @@ def __init__( class RecurrenceSchedule(msrest.serialization.Model): """The recurrence schedule. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param minutes: The minutes. - :type minutes: list[int] - :param hours: The hours. - :type hours: list[int] - :param week_days: The days of the week. - :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] - :param month_days: The month days. - :type month_days: list[int] - :param monthly_occurrences: The monthly occurrences. 
- :type monthly_occurrences: list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] + :vartype additional_properties: dict[str, any] + :ivar minutes: The minutes. + :vartype minutes: list[int] + :ivar hours: The hours. + :vartype hours: list[int] + :ivar week_days: The days of the week. + :vartype week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] + :ivar month_days: The month days. + :vartype month_days: list[int] + :ivar monthly_occurrences: The monthly occurrences. + :vartype monthly_occurrences: list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] """ _attribute_map = { @@ -28242,6 +39093,22 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword minutes: The minutes. + :paramtype minutes: list[int] + :keyword hours: The hours. + :paramtype hours: list[int] + :keyword week_days: The days of the week. + :paramtype week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] + :keyword month_days: The month days. + :paramtype month_days: list[int] + :keyword monthly_occurrences: The monthly occurrences. + :paramtype monthly_occurrences: + list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] + """ super(RecurrenceSchedule, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.minutes = kwargs.get('minutes', None) @@ -28254,14 +39121,14 @@ def __init__( class RecurrenceScheduleOccurrence(msrest.serialization.Model): """The recurrence schedule occurrence. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param day: The day of the week. 
Possible values include: "Sunday", "Monday", "Tuesday", + :vartype additional_properties: dict[str, any] + :ivar day: The day of the week. Possible values include: "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday". - :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek - :param occurrence: The occurrence. - :type occurrence: int + :vartype day: str or ~azure.mgmt.datafactory.models.DayOfWeek + :ivar occurrence: The occurrence. + :vartype occurrence: int """ _attribute_map = { @@ -28274,6 +39141,16 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword day: The day of the week. Possible values include: "Sunday", "Monday", "Tuesday", + "Wednesday", "Thursday", "Friday", "Saturday". + :paramtype day: str or ~azure.mgmt.datafactory.models.DayOfWeek + :keyword occurrence: The occurrence. + :paramtype occurrence: int + """ super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.day = kwargs.get('day', None) @@ -28285,17 +39162,17 @@ class RedirectIncompatibleRowSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param linked_service_name: Required. Name of the Azure Storage, Storage SAS, or Azure Data - Lake Store linked service used for redirecting incompatible row. Must be specified if + :vartype additional_properties: dict[str, any] + :ivar linked_service_name: Required. Name of the Azure Storage, Storage SAS, or Azure Data Lake + Store linked service used for redirecting incompatible row. 
Must be specified if redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType string). - :type linked_service_name: any - :param path: The path for storing the redirect incompatible row data. Type: string (or + :vartype linked_service_name: any + :ivar path: The path for storing the redirect incompatible row data. Type: string (or Expression with resultType string). - :type path: any + :vartype path: any """ _validation = { @@ -28312,6 +39189,19 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword linked_service_name: Required. Name of the Azure Storage, Storage SAS, or Azure Data + Lake Store linked service used for redirecting incompatible row. Must be specified if + redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType + string). + :paramtype linked_service_name: any + :keyword path: The path for storing the redirect incompatible row data. Type: string (or + Expression with resultType string). + :paramtype path: any + """ super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.linked_service_name = kwargs['linked_service_name'] @@ -28323,13 +39213,13 @@ class RedshiftUnloadSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param s3_linked_service_name: Required. The name of the Amazon S3 linked service which will be + :ivar s3_linked_service_name: Required. The name of the Amazon S3 linked service which will be used for the unload operation when copying from the Amazon Redshift source. - :type s3_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param bucket_name: Required. 
The bucket of the interim Amazon S3 which will be used to store + :vartype s3_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar bucket_name: Required. The bucket of the interim Amazon S3 which will be used to store the unloaded data from Amazon Redshift source. The bucket must be in the same region as the Amazon Redshift source. Type: string (or Expression with resultType string). - :type bucket_name: any + :vartype bucket_name: any """ _validation = { @@ -28346,6 +39236,15 @@ def __init__( self, **kwargs ): + """ + :keyword s3_linked_service_name: Required. The name of the Amazon S3 linked service which will + be used for the unload operation when copying from the Amazon Redshift source. + :paramtype s3_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword bucket_name: Required. The bucket of the interim Amazon S3 which will be used to store + the unloaded data from Amazon Redshift source. The bucket must be in the same region as the + Amazon Redshift source. Type: string (or Expression with resultType string). + :paramtype bucket_name: any + """ super(RedshiftUnloadSettings, self).__init__(**kwargs) self.s3_linked_service_name = kwargs['s3_linked_service_name'] self.bucket_name = kwargs['bucket_name'] @@ -28356,28 +39255,28 @@ class RelationalSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. 
+ :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar query: Database query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -28399,6 +39298,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(RelationalSource, self).__init__(**kwargs) self.type = 'RelationalSource' # type: str self.query = kwargs.get('query', None) @@ -28410,31 +39331,31 @@ class RelationalTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. 
Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The relational table name. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The relational table name. Type: string (or Expression with resultType string). 
- :type table_name: any + :vartype table_name: any """ _validation = { @@ -28459,6 +39380,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The relational table name. Type: string (or Expression with resultType + string). + :paramtype table_name: any + """ super(RelationalTableDataset, self).__init__(**kwargs) self.type = 'RelationalTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -28471,10 +39417,10 @@ class RemotePrivateEndpointConnection(msrest.serialization.Model): :ivar provisioning_state: :vartype provisioning_state: str - :param private_endpoint: PrivateEndpoint of a remote private endpoint connection. 
- :type private_endpoint: ~azure.mgmt.datafactory.models.ArmIdWrapper - :param private_link_service_connection_state: The state of a private link connection. - :type private_link_service_connection_state: + :ivar private_endpoint: PrivateEndpoint of a remote private endpoint connection. + :vartype private_endpoint: ~azure.mgmt.datafactory.models.ArmIdWrapper + :ivar private_link_service_connection_state: The state of a private link connection. + :vartype private_link_service_connection_state: ~azure.mgmt.datafactory.models.PrivateLinkConnectionState """ @@ -28492,6 +39438,13 @@ def __init__( self, **kwargs ): + """ + :keyword private_endpoint: PrivateEndpoint of a remote private endpoint connection. + :paramtype private_endpoint: ~azure.mgmt.datafactory.models.ArmIdWrapper + :keyword private_link_service_connection_state: The state of a private link connection. + :paramtype private_link_service_connection_state: + ~azure.mgmt.datafactory.models.PrivateLinkConnectionState + """ super(RemotePrivateEndpointConnection, self).__init__(**kwargs) self.provisioning_state = None self.private_endpoint = kwargs.get('private_endpoint', None) @@ -28505,29 +39458,29 @@ class RerunTumblingWindowTrigger(Trigger): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Trigger type.Constant filled by server. + :vartype type: str + :ivar description: Trigger description. + :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. 
Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[any] - :param parent_trigger: Required. The parent trigger reference. - :type parent_trigger: any - :param requested_start_time: Required. The start time for the time period for which restatement + :ivar annotations: List of tags that can be used for describing the trigger. + :vartype annotations: list[any] + :ivar parent_trigger: Required. The parent trigger reference. + :vartype parent_trigger: any + :ivar requested_start_time: Required. The start time for the time period for which restatement is initiated. Only UTC time is currently supported. - :type requested_start_time: ~datetime.datetime - :param requested_end_time: Required. The end time for the time period for which restatement is + :vartype requested_start_time: ~datetime.datetime + :ivar requested_end_time: Required. The end time for the time period for which restatement is initiated. Only UTC time is currently supported. - :type requested_end_time: ~datetime.datetime - :param rerun_concurrency: Required. The max number of parallel time windows (ready for + :vartype requested_end_time: ~datetime.datetime + :ivar rerun_concurrency: Required. The max number of parallel time windows (ready for execution) for which a rerun is triggered. - :type rerun_concurrency: int + :vartype rerun_concurrency: int """ _validation = { @@ -28555,6 +39508,26 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Trigger description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the trigger. 
+ :paramtype annotations: list[any] + :keyword parent_trigger: Required. The parent trigger reference. + :paramtype parent_trigger: any + :keyword requested_start_time: Required. The start time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :paramtype requested_start_time: ~datetime.datetime + :keyword requested_end_time: Required. The end time for the time period for which restatement + is initiated. Only UTC time is currently supported. + :paramtype requested_end_time: ~datetime.datetime + :keyword rerun_concurrency: Required. The max number of parallel time windows (ready for + execution) for which a rerun is triggered. + :paramtype rerun_concurrency: int + """ super(RerunTumblingWindowTrigger, self).__init__(**kwargs) self.type = 'RerunTumblingWindowTrigger' # type: str self.parent_trigger = kwargs['parent_trigger'] @@ -28568,42 +39541,42 @@ class ResponsysLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param endpoint: Required. The endpoint of the Responsys server. - :type endpoint: any - :param client_id: Required. 
The client ID associated with the Responsys application. Type: + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar endpoint: Required. The endpoint of the Responsys server. + :vartype endpoint: any + :ivar client_id: Required. The client ID associated with the Responsys application. Type: string (or Expression with resultType string). - :type client_id: any - :param client_secret: The client secret associated with the Responsys application. Type: string + :vartype client_id: any + :ivar client_secret: The client secret associated with the Responsys application. Type: string (or Expression with resultType string). - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
Type: boolean (or Expression with resultType boolean). - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -28632,6 +39605,42 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword endpoint: Required. The endpoint of the Responsys server. + :paramtype endpoint: any + :keyword client_id: Required. The client ID associated with the Responsys application. Type: + string (or Expression with resultType string). + :paramtype client_id: any + :keyword client_secret: The client secret associated with the Responsys application. 
Type: + string (or Expression with resultType string). + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. Type: boolean (or Expression with resultType boolean). + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. Type: boolean (or Expression with resultType + boolean). + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(ResponsysLinkedService, self).__init__(**kwargs) self.type = 'Responsys' # type: str self.endpoint = kwargs['endpoint'] @@ -28648,30 +39657,30 @@ class ResponsysObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. 
Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). 
+ :vartype table_name: any """ _validation = { @@ -28696,6 +39705,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(ResponsysObjectDataset, self).__init__(**kwargs) self.type = 'ResponsysObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -28706,32 +39739,32 @@ class ResponsysSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. 
Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -28754,6 +39787,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). 
+ :paramtype query: any + """ super(ResponsysSource, self).__init__(**kwargs) self.type = 'ResponsysSource' # type: str self.query = kwargs.get('query', None) @@ -28764,43 +39823,43 @@ class RestResourceDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param relative_url: The relative URL to the resource that the RESTful API provides. Type: + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar relative_url: The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType string). - :type relative_url: any - :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: + :vartype relative_url: any + :ivar request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). - :type request_method: any - :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + :vartype request_method: any + :ivar request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). - :type request_body: any - :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + :vartype request_body: any + :ivar additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: any - :param pagination_rules: The pagination rules to compose next page requests. 
Type: string (or + :vartype additional_headers: any + :ivar pagination_rules: The pagination rules to compose next page requests. Type: string (or Expression with resultType string). - :type pagination_rules: any + :vartype pagination_rules: any """ _validation = { @@ -28829,6 +39888,43 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword relative_url: The relative URL to the resource that the RESTful API provides. Type: + string (or Expression with resultType string). + :paramtype relative_url: any + :keyword request_method: The HTTP method used to call the RESTful API. The default is GET. + Type: string (or Expression with resultType string). + :paramtype request_method: any + :keyword request_body: The HTTP request body to the RESTful API if requestMethod is POST. 
Type: + string (or Expression with resultType string). + :paramtype request_body: any + :keyword additional_headers: The additional HTTP headers in the request to the RESTful API. + Type: string (or Expression with resultType string). + :paramtype additional_headers: any + :keyword pagination_rules: The pagination rules to compose next page requests. Type: string (or + Expression with resultType string). + :paramtype pagination_rules: any + """ super(RestResourceDataset, self).__init__(**kwargs) self.type = 'RestResource' # type: str self.relative_url = kwargs.get('relative_url', None) @@ -28843,57 +39939,58 @@ class RestServiceLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param url: Required. The base URL of the REST service. - :type url: any - :param enable_server_certificate_validation: Whether to validate server side SSL certificate + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. 
+ :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar url: Required. The base URL of the REST service. + :vartype url: any + :ivar enable_server_certificate_validation: Whether to validate server side SSL certificate when connecting to the endpoint.The default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: any - :param authentication_type: Required. Type of authentication used to connect to the REST + :vartype enable_server_certificate_validation: any + :ivar authentication_type: Required. Type of authentication used to connect to the REST service. Possible values include: "Anonymous", "Basic", "AadServicePrincipal", "ManagedServiceIdentity". - :type authentication_type: str or ~azure.mgmt.datafactory.models.RestServiceAuthenticationType - :param user_name: The user name used in Basic authentication type. - :type user_name: any - :param password: The password used in Basic authentication type. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param auth_headers: The additional HTTP headers in the request to RESTful API used for + :vartype authentication_type: str or + ~azure.mgmt.datafactory.models.RestServiceAuthenticationType + :ivar user_name: The user name used in Basic authentication type. + :vartype user_name: any + :ivar password: The password used in Basic authentication type. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). 
- :type auth_headers: any - :param service_principal_id: The application's client ID used in AadServicePrincipal authentication type. - :type service_principal_id: any - :param service_principal_key: The application's key used in AadServicePrincipal authentication type. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. - :type tenant: any - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + :vartype auth_headers: any + :ivar service_principal_id: The application's client ID used in AadServicePrincipal authentication type. + :vartype service_principal_id: any + :ivar service_principal_key: The application's key used in AadServicePrincipal authentication type. + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. + :vartype tenant: any + :ivar azure_cloud_type: Indicates the azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: any - :param aad_resource_id: The resource you are requesting authorization to use. - :type aad_resource_id: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype azure_cloud_type: any + :ivar aad_resource_id: The resource you are requesting authorization to use. + :vartype aad_resource_id: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype encrypted_credential: any + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -28928,6 +40025,58 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword url: Required. The base URL of the REST service. + :paramtype url: any + :keyword enable_server_certificate_validation: Whether to validate server side SSL certificate + when connecting to the endpoint.The default value is true. Type: boolean (or Expression with + resultType boolean). + :paramtype enable_server_certificate_validation: any + :keyword authentication_type: Required. Type of authentication used to connect to the REST + service. Possible values include: "Anonymous", "Basic", "AadServicePrincipal", + "ManagedServiceIdentity". + :paramtype authentication_type: str or + ~azure.mgmt.datafactory.models.RestServiceAuthenticationType + :keyword user_name: The user name used in Basic authentication type. + :paramtype user_name: any + :keyword password: The password used in Basic authentication type. 
+ :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword auth_headers: The additional HTTP headers in the request to RESTful API used for + authorization. Type: object (or Expression with resultType object). + :paramtype auth_headers: any + :keyword service_principal_id: The application's client ID used in AadServicePrincipal + authentication type. + :paramtype service_principal_id: any + :keyword service_principal_key: The application's key used in AadServicePrincipal + authentication type. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal + authentication type under which your application resides. + :paramtype tenant: any + :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. + Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is + the data factory regions’ cloud type. Type: string (or Expression with resultType string). + :paramtype azure_cloud_type: any + :keyword aad_resource_id: The resource you are requesting authorization to use. + :paramtype aad_resource_id: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(RestServiceLinkedService, self).__init__(**kwargs) self.type = 'RestService' # type: str self.url = kwargs['url'] @@ -28950,45 +40099,45 @@ class RestSink(CopySink): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param request_method: The HTTP method used to call the RESTful API. The default is POST. Type: + :vartype disable_metrics_collection: any + :ivar request_method: The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). - :type request_method: any - :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + :vartype request_method: any + :ivar additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: any - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + :vartype additional_headers: any + :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: any - :param request_interval: The time to await before sending next request, in milliseconds. - :type request_interval: any - :param http_compression_type: Http Compression Type to Send data in compressed format with + :vartype http_request_timeout: any + :ivar request_interval: The time to await before sending next request, in milliseconds. + :vartype request_interval: any + :ivar http_compression_type: Http Compression Type to Send data in compressed format with Optimal Compression Level, Default is None. 
And The Only Supported option is Gzip. - :type http_compression_type: any + :vartype http_compression_type: any """ _validation = { @@ -29015,6 +40164,45 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword request_method: The HTTP method used to call the RESTful API. The default is POST. + Type: string (or Expression with resultType string). + :paramtype request_method: any + :keyword additional_headers: The additional HTTP headers in the request to the RESTful API. + Type: string (or Expression with resultType string). + :paramtype additional_headers: any + :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the + timeout to get a response, not the timeout to read response data. 
Default value: 00:01:40. + Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype http_request_timeout: any + :keyword request_interval: The time to await before sending next request, in milliseconds. + :paramtype request_interval: any + :keyword http_compression_type: Http Compression Type to Send data in compressed format with + Optimal Compression Level, Default is None. And The Only Supported option is Gzip. + :paramtype http_compression_type: any + """ super(RestSink, self).__init__(**kwargs) self.type = 'RestSink' # type: str self.request_method = kwargs.get('request_method', None) @@ -29029,45 +40217,45 @@ class RestSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: + :vartype disable_metrics_collection: any + :ivar request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). - :type request_method: any - :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + :vartype request_method: any + :ivar request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). - :type request_body: any - :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + :vartype request_body: any + :ivar additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: any - :param pagination_rules: The pagination rules to compose next page requests. Type: string (or + :vartype additional_headers: any + :ivar pagination_rules: The pagination rules to compose next page requests. Type: string (or Expression with resultType string). - :type pagination_rules: any - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. 
It is the timeout + :vartype pagination_rules: any + :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: any - :param request_interval: The time to await before sending next page request. - :type request_interval: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype http_request_timeout: any + :ivar request_interval: The time to await before sending next page request. + :vartype request_interval: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -29094,6 +40282,45 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype disable_metrics_collection: any + :keyword request_method: The HTTP method used to call the RESTful API. The default is GET. + Type: string (or Expression with resultType string). + :paramtype request_method: any + :keyword request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + string (or Expression with resultType string). + :paramtype request_body: any + :keyword additional_headers: The additional HTTP headers in the request to the RESTful API. + Type: string (or Expression with resultType string). + :paramtype additional_headers: any + :keyword pagination_rules: The pagination rules to compose next page requests. Type: string (or + Expression with resultType string). + :paramtype pagination_rules: any + :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the + timeout to get a response, not the timeout to read response data. Default value: 00:01:40. + Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype http_request_timeout: any + :keyword request_interval: The time to await before sending next page request. + :paramtype request_interval: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(RestSource, self).__init__(**kwargs) self.type = 'RestSource' # type: str self.request_method = kwargs.get('request_method', None) @@ -29108,11 +40335,11 @@ def __init__( class RetryPolicy(msrest.serialization.Model): """Execution policy for an activity. - :param count: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with + :ivar count: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. 
- :type count: any - :param interval_in_seconds: Interval between retries in seconds. Default is 30. - :type interval_in_seconds: int + :vartype count: any + :ivar interval_in_seconds: Interval between retries in seconds. Default is 30. + :vartype interval_in_seconds: int """ _validation = { @@ -29128,6 +40355,13 @@ def __init__( self, **kwargs ): + """ + :keyword count: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression + with resultType integer), minimum: 0. + :paramtype count: any + :keyword interval_in_seconds: Interval between retries in seconds. Default is 30. + :paramtype interval_in_seconds: int + """ super(RetryPolicy, self).__init__(**kwargs) self.count = kwargs.get('count', None) self.interval_in_seconds = kwargs.get('interval_in_seconds', None) @@ -29138,19 +40372,19 @@ class RunFilterParameters(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param continuation_token: The continuation token for getting the next page of results. Null - for first page. - :type continuation_token: str - :param last_updated_after: Required. The time at or after which the run event was updated in + :ivar continuation_token: The continuation token for getting the next page of results. Null for + first page. + :vartype continuation_token: str + :ivar last_updated_after: Required. The time at or after which the run event was updated in 'ISO 8601' format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: Required. The time at or before which the run event was updated in + :vartype last_updated_after: ~datetime.datetime + :ivar last_updated_before: Required. The time at or before which the run event was updated in 'ISO 8601' format. - :type last_updated_before: ~datetime.datetime - :param filters: List of filters. - :type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] - :param order_by: List of OrderBy option. 
- :type order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] + :vartype last_updated_before: ~datetime.datetime + :ivar filters: List of filters. + :vartype filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] + :ivar order_by: List of OrderBy option. + :vartype order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] """ _validation = { @@ -29170,6 +40404,21 @@ def __init__( self, **kwargs ): + """ + :keyword continuation_token: The continuation token for getting the next page of results. Null + for first page. + :paramtype continuation_token: str + :keyword last_updated_after: Required. The time at or after which the run event was updated in + 'ISO 8601' format. + :paramtype last_updated_after: ~datetime.datetime + :keyword last_updated_before: Required. The time at or before which the run event was updated + in 'ISO 8601' format. + :paramtype last_updated_before: ~datetime.datetime + :keyword filters: List of filters. + :paramtype filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] + :keyword order_by: List of OrderBy option. + :paramtype order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] + """ super(RunFilterParameters, self).__init__(**kwargs) self.continuation_token = kwargs.get('continuation_token', None) self.last_updated_after = kwargs['last_updated_after'] @@ -29183,18 +40432,18 @@ class RunQueryFilter(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param operand: Required. Parameter name to be used for filter. The allowed operands to query + :ivar operand: Required. Parameter name to be used for filter. The allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger runs are TriggerName, TriggerRunTimestamp and Status. 
Possible values include: "PipelineName", "Status", "RunStart", "RunEnd", "ActivityName", "ActivityRunStart", "ActivityRunEnd", "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", "LatestOnly". - :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand - :param operator: Required. Operator to be used for filter. Possible values include: "Equals", + :vartype operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand + :ivar operator: Required. Operator to be used for filter. Possible values include: "Equals", "NotEquals", "In", "NotIn". - :type operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator - :param values: Required. List of filter values. - :type values: list[str] + :vartype operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator + :ivar values: Required. List of filter values. + :vartype values: list[str] """ _validation = { @@ -29213,6 +40462,20 @@ def __init__( self, **kwargs ): + """ + :keyword operand: Required. Parameter name to be used for filter. The allowed operands to query + pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are + ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger + runs are TriggerName, TriggerRunTimestamp and Status. Possible values include: "PipelineName", + "Status", "RunStart", "RunEnd", "ActivityName", "ActivityRunStart", "ActivityRunEnd", + "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", "LatestOnly". + :paramtype operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand + :keyword operator: Required. Operator to be used for filter. Possible values include: "Equals", + "NotEquals", "In", "NotIn". + :paramtype operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator + :keyword values: Required. List of filter values. 
+ :paramtype values: list[str] + """ super(RunQueryFilter, self).__init__(**kwargs) self.operand = kwargs['operand'] self.operator = kwargs['operator'] @@ -29224,15 +40487,15 @@ class RunQueryOrderBy(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param order_by: Required. Parameter name to be used for order by. The allowed parameters to + :ivar order_by: Required. Parameter name to be used for order by. The allowed parameters to order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, TriggerRunTimestamp and Status. Possible values include: "RunStart", "RunEnd", "PipelineName", "Status", "ActivityName", "ActivityRunStart", "ActivityRunEnd", "TriggerName", "TriggerRunTimestamp". - :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField - :param order: Required. Sorting order of the parameter. Possible values include: "ASC", "DESC". - :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder + :vartype order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField + :ivar order: Required. Sorting order of the parameter. Possible values include: "ASC", "DESC". + :vartype order: str or ~azure.mgmt.datafactory.models.RunQueryOrder """ _validation = { @@ -29249,6 +40512,18 @@ def __init__( self, **kwargs ): + """ + :keyword order_by: Required. Parameter name to be used for order by. The allowed parameters to + order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are + ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, + TriggerRunTimestamp and Status. Possible values include: "RunStart", "RunEnd", "PipelineName", + "Status", "ActivityName", "ActivityRunStart", "ActivityRunEnd", "TriggerName", + "TriggerRunTimestamp". 
+ :paramtype order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField + :keyword order: Required. Sorting order of the parameter. Possible values include: "ASC", + "DESC". + :paramtype order: str or ~azure.mgmt.datafactory.models.RunQueryOrder + """ super(RunQueryOrderBy, self).__init__(**kwargs) self.order_by = kwargs['order_by'] self.order = kwargs['order'] @@ -29259,38 +40534,38 @@ class SalesforceLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param environment_url: The URL of Salesforce instance. Default is + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. 
+ :vartype annotations: list[any] + :ivar environment_url: The URL of Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). - :type environment_url: any - :param username: The username for Basic authentication of the Salesforce instance. Type: string + :vartype environment_url: any + :ivar username: The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). - :type username: any - :param password: The password for Basic authentication of the Salesforce instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is optional to remotely access Salesforce instance. - :type security_token: ~azure.mgmt.datafactory.models.SecretBase - :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with + :vartype username: any + :ivar password: The password for Basic authentication of the Salesforce instance. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar security_token: The security token is optional to remotely access Salesforce instance. + :vartype security_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar api_version: The Salesforce API version used in ADF. Type: string (or Expression with resultType string). - :type api_version: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype api_version: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -29316,6 +40591,38 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword environment_url: The URL of Salesforce instance. Default is + 'https://login.salesforce.com'. To copy data from sandbox, specify + 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, + 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). + :paramtype environment_url: any + :keyword username: The username for Basic authentication of the Salesforce instance. Type: + string (or Expression with resultType string). + :paramtype username: any + :keyword password: The password for Basic authentication of the Salesforce instance. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword security_token: The security token is optional to remotely access Salesforce instance. + :paramtype security_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword api_version: The Salesforce API version used in ADF. Type: string (or Expression with + resultType string). + :paramtype api_version: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. 
Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(SalesforceLinkedService, self).__init__(**kwargs) self.type = 'Salesforce' # type: str self.environment_url = kwargs.get('environment_url', None) @@ -29331,43 +40638,43 @@ class SalesforceMarketingCloudLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_properties: Properties used to connect to Salesforce Marketing Cloud. It is + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. 
+ :vartype annotations: list[any] + :ivar connection_properties: Properties used to connect to Salesforce Marketing Cloud. It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: any - :param client_id: The client ID associated with the Salesforce Marketing Cloud application. + :vartype connection_properties: any + :ivar client_id: The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). - :type client_id: any - :param client_secret: The client secret associated with the Salesforce Marketing Cloud + :vartype client_id: any + :ivar client_secret: The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. 
Type: boolean (or Expression with resultType boolean). - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -29394,6 +40701,43 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_properties: Properties used to connect to Salesforce Marketing Cloud. It is + mutually exclusive with any other properties in the linked service. Type: object. + :paramtype connection_properties: any + :keyword client_id: The client ID associated with the Salesforce Marketing Cloud application. + Type: string (or Expression with resultType string). + :paramtype client_id: any + :keyword client_secret: The client secret associated with the Salesforce Marketing Cloud + application. Type: string (or Expression with resultType string). + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. 
The default value is true. Type: boolean (or Expression with resultType boolean). + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. Type: boolean (or Expression with resultType boolean). + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. Type: boolean (or Expression with resultType + boolean). + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(SalesforceMarketingCloudLinkedService, self).__init__(**kwargs) self.type = 'SalesforceMarketingCloud' # type: str self.connection_properties = kwargs.get('connection_properties', None) @@ -29410,30 +40754,30 @@ class SalesforceMarketingCloudObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. 
+ :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). 
+ :vartype table_name: any """ _validation = { @@ -29458,6 +40802,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(SalesforceMarketingCloudObjectDataset, self).__init__(**kwargs) self.type = 'SalesforceMarketingCloudObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -29468,32 +40836,32 @@ class SalesforceMarketingCloudSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. 
Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -29516,6 +40884,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(SalesforceMarketingCloudSource, self).__init__(**kwargs) self.type = 'SalesforceMarketingCloudSource' # type: str self.query = kwargs.get('query', None) @@ -29526,31 +40920,31 @@ class SalesforceObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param object_api_name: The Salesforce object API name. Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar object_api_name: The Salesforce object API name. Type: string (or Expression with resultType string). - :type object_api_name: any + :vartype object_api_name: any """ _validation = { @@ -29575,6 +40969,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword object_api_name: The Salesforce object API name. Type: string (or Expression with + resultType string). + :paramtype object_api_name: any + """ super(SalesforceObjectDataset, self).__init__(**kwargs) self.type = 'SalesforceObject' # type: str self.object_api_name = kwargs.get('object_api_name', None) @@ -29585,41 +41004,41 @@ class SalesforceServiceCloudLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param environment_url: The URL of Salesforce Service Cloud instance. Default is + :vartype additional_properties: dict[str, any] + :ivar type: Required. 
Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar environment_url: The URL of Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). - :type environment_url: any - :param username: The username for Basic authentication of the Salesforce instance. Type: string + :vartype environment_url: any + :ivar username: The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). - :type username: any - :param password: The password for Basic authentication of the Salesforce instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is optional to remotely access Salesforce instance. - :type security_token: ~azure.mgmt.datafactory.models.SecretBase - :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with + :vartype username: any + :ivar password: The password for Basic authentication of the Salesforce instance. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar security_token: The security token is optional to remotely access Salesforce instance. + :vartype security_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar api_version: The Salesforce API version used in ADF. 
Type: string (or Expression with resultType string). - :type api_version: any - :param extended_properties: Extended properties appended to the connection string. Type: string + :vartype api_version: any + :ivar extended_properties: Extended properties appended to the connection string. Type: string (or Expression with resultType string). - :type extended_properties: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype extended_properties: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -29646,6 +41065,41 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword environment_url: The URL of Salesforce Service Cloud instance. Default is + 'https://login.salesforce.com'. To copy data from sandbox, specify + 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, + 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). + :paramtype environment_url: any + :keyword username: The username for Basic authentication of the Salesforce instance. 
Type: + string (or Expression with resultType string). + :paramtype username: any + :keyword password: The password for Basic authentication of the Salesforce instance. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword security_token: The security token is optional to remotely access Salesforce instance. + :paramtype security_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword api_version: The Salesforce API version used in ADF. Type: string (or Expression with + resultType string). + :paramtype api_version: any + :keyword extended_properties: Extended properties appended to the connection string. Type: + string (or Expression with resultType string). + :paramtype extended_properties: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(SalesforceServiceCloudLinkedService, self).__init__(**kwargs) self.type = 'SalesforceServiceCloud' # type: str self.environment_url = kwargs.get('environment_url', None) @@ -29662,31 +41116,31 @@ class SalesforceServiceCloudObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. 
+ :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param object_api_name: The Salesforce Service Cloud object API name. Type: string (or + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar object_api_name: The Salesforce Service Cloud object API name. Type: string (or Expression with resultType string). 
- :type object_api_name: any + :vartype object_api_name: any """ _validation = { @@ -29711,6 +41165,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword object_api_name: The Salesforce Service Cloud object API name. Type: string (or + Expression with resultType string). + :paramtype object_api_name: any + """ super(SalesforceServiceCloudObjectDataset, self).__init__(**kwargs) self.type = 'SalesforceServiceCloudObject' # type: str self.object_api_name = kwargs.get('object_api_name', None) @@ -29721,42 +41200,42 @@ class SalesforceServiceCloudSink(CopySink): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: The write behavior for the operation. Default is Insert. Possible values + :vartype disable_metrics_collection: any + :ivar write_behavior: The write behavior for the operation. Default is Insert. Possible values include: "Insert", "Upsert". - :type write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior - :param external_id_field_name: The name of the external ID field for upsert operation. Default + :vartype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :ivar external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). - :type external_id_field_name: any - :param ignore_null_values: The flag indicating whether or not to ignore null values from input + :vartype external_id_field_name: any + :ivar ignore_null_values: The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). 
- :type ignore_null_values: any + :vartype ignore_null_values: any """ _validation = { @@ -29781,6 +41260,43 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: The write behavior for the operation. Default is Insert. Possible + values include: "Insert", "Upsert". + :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :keyword external_id_field_name: The name of the external ID field for upsert operation. + Default value is 'Id' column. Type: string (or Expression with resultType string). 
+ :paramtype external_id_field_name: any + :keyword ignore_null_values: The flag indicating whether or not to ignore null values from + input dataset (except key fields) during write operation. Default value is false. If set it to + true, it means ADF will leave the data in the destination object unchanged when doing + upsert/update operation and insert defined default value when doing insert operation, versus + ADF will update the data in the destination object to NULL when doing upsert/update operation + and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType + boolean). + :paramtype ignore_null_values: any + """ super(SalesforceServiceCloudSink, self).__init__(**kwargs) self.type = 'SalesforceServiceCloudSink' # type: str self.write_behavior = kwargs.get('write_behavior', None) @@ -29793,31 +41309,31 @@ class SalesforceServiceCloudSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any - :param read_behavior: The read behavior for the operation. Default is Query. Possible values + :vartype disable_metrics_collection: any + :ivar query: Database query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar read_behavior: The read behavior for the operation. Default is Query. Possible values include: "Query", "QueryAll". - :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -29840,6 +41356,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. 
Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword read_behavior: The read behavior for the operation. Default is Query. Possible values + include: "Query", "QueryAll". + :paramtype read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(SalesforceServiceCloudSource, self).__init__(**kwargs) self.type = 'SalesforceServiceCloudSource' # type: str self.query = kwargs.get('query', None) @@ -29852,42 +41393,42 @@ class SalesforceSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. 
Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: The write behavior for the operation. Default is Insert. 
Possible values + :vartype disable_metrics_collection: any + :ivar write_behavior: The write behavior for the operation. Default is Insert. Possible values include: "Insert", "Upsert". - :type write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior - :param external_id_field_name: The name of the external ID field for upsert operation. Default + :vartype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :ivar external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). - :type external_id_field_name: any - :param ignore_null_values: The flag indicating whether or not to ignore null values from input + :vartype external_id_field_name: any + :ivar ignore_null_values: The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: any + :vartype ignore_null_values: any """ _validation = { @@ -29912,6 +41453,43 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: The write behavior for the operation. Default is Insert. Possible + values include: "Insert", "Upsert". + :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :keyword external_id_field_name: The name of the external ID field for upsert operation. + Default value is 'Id' column. Type: string (or Expression with resultType string). + :paramtype external_id_field_name: any + :keyword ignore_null_values: The flag indicating whether or not to ignore null values from + input dataset (except key fields) during write operation. Default value is false. If set it to + true, it means ADF will leave the data in the destination object unchanged when doing + upsert/update operation and insert defined default value when doing insert operation, versus + ADF will update the data in the destination object to NULL when doing upsert/update operation + and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType + boolean). 
+ :paramtype ignore_null_values: any + """ super(SalesforceSink, self).__init__(**kwargs) self.type = 'SalesforceSink' # type: str self.write_behavior = kwargs.get('write_behavior', None) @@ -29924,34 +41502,34 @@ class SalesforceSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any - :param read_behavior: The read behavior for the operation. Default is Query. Possible values + :vartype additional_columns: any + :ivar query: Database query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar read_behavior: The read behavior for the operation. Default is Query. Possible values include: "Query", "QueryAll". - :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + :vartype read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior """ _validation = { @@ -29975,6 +41553,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword read_behavior: The read behavior for the operation. Default is Query. Possible values + include: "Query", "QueryAll". + :paramtype read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + """ super(SalesforceSource, self).__init__(**kwargs) self.type = 'SalesforceSource' # type: str self.query = kwargs.get('query', None) @@ -29986,28 +41592,28 @@ class SapBwCubeDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder """ _validation = { @@ -30031,6 +41637,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + """ super(SapBwCubeDataset, self).__init__(**kwargs) self.type = 'SapBwCube' # type: str @@ -30040,37 +41668,37 @@ class SapBWLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. 
- :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param server: Required. Host name of the SAP BW instance. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar server: Required. Host name of the SAP BW instance. Type: string (or Expression with resultType string). - :type server: any - :param system_number: Required. System number of the BW system. (Usually a two-digit decimal + :vartype server: any + :ivar system_number: Required. System number of the BW system. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). - :type system_number: any - :param client_id: Required. Client ID of the client on the BW system. (Usually a three-digit + :vartype system_number: any + :ivar client_id: Required. Client ID of the client on the BW system. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). 
- :type client_id: any - :param user_name: Username to access the SAP BW server. Type: string (or Expression with + :vartype client_id: any + :ivar user_name: Username to access the SAP BW server. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password to access the SAP BW server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype user_name: any + :ivar password: Password to access the SAP BW server. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -30099,6 +41727,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword server: Required. Host name of the SAP BW instance. Type: string (or Expression with + resultType string). + :paramtype server: any + :keyword system_number: Required. System number of the BW system. (Usually a two-digit decimal + number represented as a string.) Type: string (or Expression with resultType string). 
+ :paramtype system_number: any + :keyword client_id: Required. Client ID of the client on the BW system. (Usually a three-digit + decimal number represented as a string) Type: string (or Expression with resultType string). + :paramtype client_id: any + :keyword user_name: Username to access the SAP BW server. Type: string (or Expression with + resultType string). + :paramtype user_name: any + :keyword password: Password to access the SAP BW server. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(SapBWLinkedService, self).__init__(**kwargs) self.type = 'SapBW' # type: str self.server = kwargs['server'] @@ -30114,31 +41773,31 @@ class SapBwSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: MDX query. Type: string (or Expression with resultType string). - :type query: any + :vartype additional_columns: any + :ivar query: MDX query. Type: string (or Expression with resultType string). + :vartype query: any """ _validation = { @@ -30161,6 +41820,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. 
Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: MDX query. Type: string (or Expression with resultType string). + :paramtype query: any + """ super(SapBwSource, self).__init__(**kwargs) self.type = 'SapBwSource' # type: str self.query = kwargs.get('query', None) @@ -30171,32 +41855,32 @@ class SapCloudForCustomerLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param url: Required. The URL of SAP Cloud for Customer OData API. For example, + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar url: Required. The URL of SAP Cloud for Customer OData API. For example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with resultType string). - :type url: any - :param username: The username for Basic authentication. Type: string (or Expression with + :vartype url: any + :ivar username: The username for Basic authentication. Type: string (or Expression with resultType string). - :type username: any - :param password: The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype username: any + :ivar password: The password for Basic authentication. 
+ :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -30221,6 +41905,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword url: Required. The URL of SAP Cloud for Customer OData API. For example, + '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with + resultType string). + :paramtype url: any + :keyword username: The username for Basic authentication. Type: string (or Expression with + resultType string). + :paramtype username: any + :keyword password: The password for Basic authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Either encryptedCredential or + username/password must be provided. Type: string (or Expression with resultType string). 
+ :paramtype encrypted_credential: any + """ super(SapCloudForCustomerLinkedService, self).__init__(**kwargs) self.type = 'SapCloudForCustomer' # type: str self.url = kwargs['url'] @@ -30234,31 +41944,31 @@ class SapCloudForCustomerResourceDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param path: Required. The path of the SAP Cloud for Customer OData entity. Type: string (or + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar path: Required. The path of the SAP Cloud for Customer OData entity. Type: string (or Expression with resultType string). - :type path: any + :vartype path: any """ _validation = { @@ -30284,6 +41994,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword path: Required. The path of the SAP Cloud for Customer OData entity. Type: string (or + Expression with resultType string). + :paramtype path: any + """ super(SapCloudForCustomerResourceDataset, self).__init__(**kwargs) self.type = 'SapCloudForCustomerResource' # type: str self.path = kwargs['path'] @@ -30294,38 +42029,38 @@ class SapCloudForCustomerSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible + :vartype disable_metrics_collection: any + :ivar write_behavior: The write behavior for the operation. Default is 'Insert'. Possible values include: "Insert", "Update". - :type write_behavior: str or + :vartype write_behavior: str or ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type http_request_timeout: any + :vartype http_request_timeout: any """ _validation = { @@ -30349,6 +42084,38 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: The write behavior for the operation. Default is 'Insert'. Possible + values include: "Insert", "Update". + :paramtype write_behavior: str or + ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior + :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the + timeout to get a response, not the timeout to read response data. Default value: 00:05:00. + Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype http_request_timeout: any + """ super(SapCloudForCustomerSink, self).__init__(**kwargs) self.type = 'SapCloudForCustomerSink' # type: str self.write_behavior = kwargs.get('write_behavior', None) @@ -30360,37 +42127,37 @@ class SapCloudForCustomerSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or + :vartype additional_columns: any + :ivar query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: any - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + :vartype query: any + :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: any + :vartype http_request_timeout: any """ _validation = { @@ -30414,6 +42181,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or + Expression with resultType string). + :paramtype query: any + :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the + timeout to get a response, not the timeout to read response data. Default value: 00:05:00. + Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype http_request_timeout: any + """ super(SapCloudForCustomerSource, self).__init__(**kwargs) self.type = 'SapCloudForCustomerSource' # type: str self.query = kwargs.get('query', None) @@ -30425,32 +42223,32 @@ class SapEccLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param url: Required. The URL of SAP ECC OData API. For example, + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar url: Required. The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with resultType string). - :type url: str - :param username: The username for Basic authentication. Type: string (or Expression with + :vartype url: str + :ivar username: The username for Basic authentication. Type: string (or Expression with resultType string). - :type username: str - :param password: The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are + :vartype username: str + :ivar password: The password for Basic authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). - :type encrypted_credential: str + :vartype encrypted_credential: str """ _validation = { @@ -30475,6 +42273,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword url: Required. The URL of SAP ECC OData API. For example, + '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with + resultType string). + :paramtype url: str + :keyword username: The username for Basic authentication. Type: string (or Expression with + resultType string). + :paramtype username: str + :keyword password: The password for Basic authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Either encryptedCredential or + username/password must be provided. 
Type: string (or Expression with resultType string). + :paramtype encrypted_credential: str + """ super(SapEccLinkedService, self).__init__(**kwargs) self.type = 'SapEcc' # type: str self.url = kwargs['url'] @@ -30488,31 +42312,31 @@ class SapEccResourceDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). - :type path: any + :vartype path: any """ _validation = { @@ -30538,6 +42362,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with + resultType string). + :paramtype path: any + """ super(SapEccResourceDataset, self).__init__(**kwargs) self.type = 'SapEccResource' # type: str self.path = kwargs['path'] @@ -30548,37 +42397,37 @@ class SapEccSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with + :vartype additional_columns: any + :ivar query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: any - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + :vartype query: any + :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: any + :vartype http_request_timeout: any """ _validation = { @@ -30602,6 +42451,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with + resultType string). + :paramtype query: any + :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the + timeout to get a response, not the timeout to read response data. Default value: 00:05:00. 
+ Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype http_request_timeout: any + """ super(SapEccSource, self).__init__(**kwargs) self.type = 'SapEccSource' # type: str self.query = kwargs.get('query', None) @@ -30613,37 +42493,37 @@ class SapHanaLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: SAP HANA ODBC connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. 
+ :vartype annotations: list[any] + :ivar connection_string: SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param server: Host name of the SAP HANA server. Type: string (or Expression with resultType + :vartype connection_string: any + :ivar server: Host name of the SAP HANA server. Type: string (or Expression with resultType string). - :type server: any - :param authentication_type: The authentication type to be used to connect to the SAP HANA + :vartype server: any + :ivar authentication_type: The authentication type to be used to connect to the SAP HANA server. Possible values include: "Basic", "Windows". - :type authentication_type: str or ~azure.mgmt.datafactory.models.SapHanaAuthenticationType - :param user_name: Username to access the SAP HANA server. Type: string (or Expression with + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.SapHanaAuthenticationType + :ivar user_name: Username to access the SAP HANA server. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password to access the SAP HANA server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype user_name: any + :ivar password: Password to access the SAP HANA server. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -30669,6 +42549,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: SAP HANA ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword server: Host name of the SAP HANA server. Type: string (or Expression with resultType + string). + :paramtype server: any + :keyword authentication_type: The authentication type to be used to connect to the SAP HANA + server. Possible values include: "Basic", "Windows". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.SapHanaAuthenticationType + :keyword user_name: Username to access the SAP HANA server. Type: string (or Expression with + resultType string). + :paramtype user_name: any + :keyword password: Password to access the SAP HANA server. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(SapHanaLinkedService, self).__init__(**kwargs) self.type = 'SapHana' # type: str self.connection_string = kwargs.get('connection_string', None) @@ -30682,9 +42593,9 @@ def __init__( class SapHanaPartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for SAP HANA source partitioning. 
- :param partition_column_name: The name of the column that will be used for proceeding range + :ivar partition_column_name: The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: any + :vartype partition_column_name: any """ _attribute_map = { @@ -30695,6 +42606,11 @@ def __init__( self, **kwargs ): + """ + :keyword partition_column_name: The name of the column that will be used for proceeding range + partitioning. Type: string (or Expression with resultType string). + :paramtype partition_column_name: any + """ super(SapHanaPartitionSettings, self).__init__(**kwargs) self.partition_column_name = kwargs.get('partition_column_name', None) @@ -30704,40 +42620,39 @@ class SapHanaSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). - :type query: any - :param packet_size: The packet size of data read from SAP HANA. Type: integer(or Expression - with resultType integer). - :type packet_size: any - :param partition_option: The partition mechanism that will be used for SAP HANA read in + :vartype additional_columns: any + :ivar query: SAP HANA Sql query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar packet_size: The packet size of data read from SAP HANA. Type: integer(or Expression with + resultType integer). 
+ :vartype packet_size: any + :ivar partition_option: The partition mechanism that will be used for SAP HANA read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for SAP HANA source - partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.SapHanaPartitionSettings + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for SAP HANA source partitioning. + :vartype partition_settings: ~azure.mgmt.datafactory.models.SapHanaPartitionSettings """ _validation = { @@ -30763,6 +42678,40 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: SAP HANA Sql query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword packet_size: The packet size of data read from SAP HANA. Type: integer(or Expression + with resultType integer). + :paramtype packet_size: any + :keyword partition_option: The partition mechanism that will be used for SAP HANA read in + parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for SAP HANA source + partitioning. + :paramtype partition_settings: ~azure.mgmt.datafactory.models.SapHanaPartitionSettings + """ super(SapHanaSource, self).__init__(**kwargs) self.type = 'SapHanaSource' # type: str self.query = kwargs.get('query', None) @@ -30776,33 +42725,33 @@ class SapHanaTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param schema_type_properties_schema: The schema name of SAP HANA. Type: string (or Expression + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar schema_type_properties_schema: The schema name of SAP HANA. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The table name of SAP HANA. Type: string (or Expression with resultType string). - :type table: any + :vartype schema_type_properties_schema: any + :ivar table: The table name of SAP HANA. Type: string (or Expression with resultType string). 
+ :vartype table: any """ _validation = { @@ -30828,6 +42777,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword schema_type_properties_schema: The schema name of SAP HANA. Type: string (or + Expression with resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The table name of SAP HANA. Type: string (or Expression with resultType + string). + :paramtype table: any + """ super(SapHanaTableDataset, self).__init__(**kwargs) self.type = 'SapHanaTable' # type: str self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -30839,55 +42816,54 @@ class SapOpenHubLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param server: Host name of the SAP BW instance where the open hub destination is located. - Type: string (or Expression with resultType string). - :type server: any - :param system_number: System number of the BW system where the open hub destination is located. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar server: Host name of the SAP BW instance where the open hub destination is located. Type: + string (or Expression with resultType string). + :vartype server: any + :ivar system_number: System number of the BW system where the open hub destination is located. 
(Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). - :type system_number: any - :param client_id: Client ID of the client on the BW system where the open hub destination is + :vartype system_number: any + :ivar client_id: Client ID of the client on the BW system where the open hub destination is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). - :type client_id: any - :param language: Language of the BW system where the open hub destination is located. The + :vartype client_id: any + :ivar language: Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression with resultType string). - :type language: any - :param system_id: SystemID of the SAP system where the table is located. Type: string (or + :vartype language: any + :ivar system_id: SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). - :type system_id: any - :param user_name: Username to access the SAP BW server where the open hub destination is + :vartype system_id: any + :ivar user_name: Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password to access the SAP BW server where the open hub destination is - located. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with + :vartype user_name: any + :ivar password: Password to access the SAP BW server where the open hub destination is located. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar message_server: The hostname of the SAP Message Server. Type: string (or Expression with resultType string). 
- :type message_server: any - :param message_server_service: The service name or port number of the Message Server. Type: + :vartype message_server: any + :ivar message_server_service: The service name or port number of the Message Server. Type: string (or Expression with resultType string). - :type message_server_service: any - :param logon_group: The Logon Group for the SAP System. Type: string (or Expression with + :vartype message_server_service: any + :ivar logon_group: The Logon Group for the SAP System. Type: string (or Expression with resultType string). - :type logon_group: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype logon_group: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -30918,6 +42894,55 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword server: Host name of the SAP BW instance where the open hub destination is located. + Type: string (or Expression with resultType string). 
+ :paramtype server: any + :keyword system_number: System number of the BW system where the open hub destination is + located. (Usually a two-digit decimal number represented as a string.) Type: string (or + Expression with resultType string). + :paramtype system_number: any + :keyword client_id: Client ID of the client on the BW system where the open hub destination is + located. (Usually a three-digit decimal number represented as a string) Type: string (or + Expression with resultType string). + :paramtype client_id: any + :keyword language: Language of the BW system where the open hub destination is located. The + default value is EN. Type: string (or Expression with resultType string). + :paramtype language: any + :keyword system_id: SystemID of the SAP system where the table is located. Type: string (or + Expression with resultType string). + :paramtype system_id: any + :keyword user_name: Username to access the SAP BW server where the open hub destination is + located. Type: string (or Expression with resultType string). + :paramtype user_name: any + :keyword password: Password to access the SAP BW server where the open hub destination is + located. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword message_server: The hostname of the SAP Message Server. Type: string (or Expression + with resultType string). + :paramtype message_server: any + :keyword message_server_service: The service name or port number of the Message Server. Type: + string (or Expression with resultType string). + :paramtype message_server_service: any + :keyword logon_group: The Logon Group for the SAP System. Type: string (or Expression with + resultType string). + :paramtype logon_group: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(SapOpenHubLinkedService, self).__init__(**kwargs) self.type = 'SapOpenHub' # type: str self.server = kwargs.get('server', None) @@ -30938,43 +42963,43 @@ class SapOpenHubSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param exclude_last_request: Whether to exclude the records of the last request. The default + :vartype additional_columns: any + :ivar exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). - :type exclude_last_request: any - :param base_request_id: The ID of request for delta loading. Once it is set, only data with + :vartype exclude_last_request: any + :ivar base_request_id: The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). - :type base_request_id: any - :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that - will be used to read data from SAP Table. Type: string (or Expression with resultType string). - :type custom_rfc_read_table_function_module: any - :param sap_data_column_delimiter: The single character that will be used as delimiter passed to + :vartype base_request_id: any + :ivar custom_rfc_read_table_function_module: Specifies the custom RFC function module that will + be used to read data from SAP Table. Type: string (or Expression with resultType string). 
+ :vartype custom_rfc_read_table_function_module: any + :ivar sap_data_column_delimiter: The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). - :type sap_data_column_delimiter: any + :vartype sap_data_column_delimiter: any """ _validation = { @@ -31000,6 +43025,43 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword exclude_last_request: Whether to exclude the records of the last request. The default + value is true. Type: boolean (or Expression with resultType boolean). 
+ :paramtype exclude_last_request: any + :keyword base_request_id: The ID of request for delta loading. Once it is set, only data with + requestId larger than the value of this property will be retrieved. The default value is 0. + Type: integer (or Expression with resultType integer ). + :paramtype base_request_id: any + :keyword custom_rfc_read_table_function_module: Specifies the custom RFC function module that + will be used to read data from SAP Table. Type: string (or Expression with resultType string). + :paramtype custom_rfc_read_table_function_module: any + :keyword sap_data_column_delimiter: The single character that will be used as delimiter passed + to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with + resultType string). + :paramtype sap_data_column_delimiter: any + """ super(SapOpenHubSource, self).__init__(**kwargs) self.type = 'SapOpenHubSource' # type: str self.exclude_last_request = kwargs.get('exclude_last_request', None) @@ -31013,38 +43075,38 @@ class SapOpenHubTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param open_hub_destination_name: Required. The name of the Open Hub Destination with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar open_hub_destination_name: Required. The name of the Open Hub Destination with destination type as Database Table. Type: string (or Expression with resultType string). - :type open_hub_destination_name: any - :param exclude_last_request: Whether to exclude the records of the last request. The default + :vartype open_hub_destination_name: any + :ivar exclude_last_request: Whether to exclude the records of the last request. 
The default value is true. Type: boolean (or Expression with resultType boolean). - :type exclude_last_request: any - :param base_request_id: The ID of request for delta loading. Once it is set, only data with + :vartype exclude_last_request: any + :ivar base_request_id: The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). - :type base_request_id: any + :vartype base_request_id: any """ _validation = { @@ -31072,6 +43134,38 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword open_hub_destination_name: Required. The name of the Open Hub Destination with + destination type as Database Table. 
Type: string (or Expression with resultType string). + :paramtype open_hub_destination_name: any + :keyword exclude_last_request: Whether to exclude the records of the last request. The default + value is true. Type: boolean (or Expression with resultType boolean). + :paramtype exclude_last_request: any + :keyword base_request_id: The ID of request for delta loading. Once it is set, only data with + requestId larger than the value of this property will be retrieved. The default value is 0. + Type: integer (or Expression with resultType integer ). + :paramtype base_request_id: any + """ super(SapOpenHubTableDataset, self).__init__(**kwargs) self.type = 'SapOpenHubTable' # type: str self.open_hub_destination_name = kwargs['open_hub_destination_name'] @@ -31084,69 +43178,69 @@ class SapTableLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param server: Host name of the SAP instance where the table is located. Type: string (or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. 
+ :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar server: Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). - :type server: any - :param system_number: System number of the SAP system where the table is located. (Usually a + :vartype server: any + :ivar system_number: System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). - :type system_number: any - :param client_id: Client ID of the client on the SAP system where the table is located. - (Usually a three-digit decimal number represented as a string) Type: string (or Expression with + :vartype system_number: any + :ivar client_id: Client ID of the client on the SAP system where the table is located. (Usually + a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). - :type client_id: any - :param language: Language of the SAP system where the table is located. The default value is - EN. Type: string (or Expression with resultType string). - :type language: any - :param system_id: SystemID of the SAP system where the table is located. Type: string (or + :vartype client_id: any + :ivar language: Language of the SAP system where the table is located. The default value is EN. + Type: string (or Expression with resultType string). + :vartype language: any + :ivar system_id: SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). 
- :type system_id: any - :param user_name: Username to access the SAP server where the table is located. Type: string - (or Expression with resultType string). - :type user_name: any - :param password: Password to access the SAP server where the table is located. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with + :vartype system_id: any + :ivar user_name: Username to access the SAP server where the table is located. Type: string (or + Expression with resultType string). + :vartype user_name: any + :ivar password: Password to access the SAP server where the table is located. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar message_server: The hostname of the SAP Message Server. Type: string (or Expression with resultType string). - :type message_server: any - :param message_server_service: The service name or port number of the Message Server. Type: + :vartype message_server: any + :ivar message_server_service: The service name or port number of the Message Server. Type: string (or Expression with resultType string). - :type message_server_service: any - :param snc_mode: SNC activation indicator to access the SAP server where the table is located. + :vartype message_server_service: any + :ivar snc_mode: SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). - :type snc_mode: any - :param snc_my_name: Initiator's SNC name to access the SAP server where the table is located. + :vartype snc_mode: any + :ivar snc_my_name: Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). 
- :type snc_my_name: any - :param snc_partner_name: Communication partner's SNC name to access the SAP server where the + :vartype snc_my_name: any + :ivar snc_partner_name: Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :type snc_partner_name: any - :param snc_library_path: External security product's library to access the SAP server where the + :vartype snc_partner_name: any + :ivar snc_library_path: External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :type snc_library_path: any - :param snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string + :vartype snc_library_path: any + :ivar snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). - :type snc_qop: any - :param logon_group: The Logon Group for the SAP System. Type: string (or Expression with + :vartype snc_qop: any + :ivar logon_group: The Logon Group for the SAP System. Type: string (or Expression with resultType string). - :type logon_group: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype logon_group: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -31182,6 +43276,69 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword server: Host name of the SAP instance where the table is located. Type: string (or + Expression with resultType string). + :paramtype server: any + :keyword system_number: System number of the SAP system where the table is located. (Usually a + two-digit decimal number represented as a string.) Type: string (or Expression with resultType + string). + :paramtype system_number: any + :keyword client_id: Client ID of the client on the SAP system where the table is located. + (Usually a three-digit decimal number represented as a string) Type: string (or Expression with + resultType string). + :paramtype client_id: any + :keyword language: Language of the SAP system where the table is located. The default value is + EN. Type: string (or Expression with resultType string). + :paramtype language: any + :keyword system_id: SystemID of the SAP system where the table is located. Type: string (or + Expression with resultType string). + :paramtype system_id: any + :keyword user_name: Username to access the SAP server where the table is located. Type: string + (or Expression with resultType string). + :paramtype user_name: any + :keyword password: Password to access the SAP server where the table is located. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword message_server: The hostname of the SAP Message Server. Type: string (or Expression + with resultType string). + :paramtype message_server: any + :keyword message_server_service: The service name or port number of the Message Server. 
Type: + string (or Expression with resultType string). + :paramtype message_server_service: any + :keyword snc_mode: SNC activation indicator to access the SAP server where the table is + located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). + :paramtype snc_mode: any + :keyword snc_my_name: Initiator's SNC name to access the SAP server where the table is located. + Type: string (or Expression with resultType string). + :paramtype snc_my_name: any + :keyword snc_partner_name: Communication partner's SNC name to access the SAP server where the + table is located. Type: string (or Expression with resultType string). + :paramtype snc_partner_name: any + :keyword snc_library_path: External security product's library to access the SAP server where + the table is located. Type: string (or Expression with resultType string). + :paramtype snc_library_path: any + :keyword snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string + (or Expression with resultType string). + :paramtype snc_qop: any + :keyword logon_group: The Logon Group for the SAP System. Type: string (or Expression with + resultType string). + :paramtype logon_group: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(SapTableLinkedService, self).__init__(**kwargs) self.type = 'SapTable' # type: str self.server = kwargs.get('server', None) @@ -31205,20 +43362,20 @@ def __init__( class SapTablePartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for SAP table source partitioning. - :param partition_column_name: The name of the column that will be used for proceeding range + :ivar partition_column_name: The name of the column that will be used for proceeding range partitioning. 
Type: string (or Expression with resultType string). - :type partition_column_name: any - :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + :vartype partition_column_name: any + :ivar partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_upper_bound: any - :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + :vartype partition_upper_bound: any + :ivar partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_lower_bound: any - :param max_partitions_number: The maximum value of partitions the table will be split into. + :vartype partition_lower_bound: any + :ivar max_partitions_number: The maximum value of partitions the table will be split into. Type: integer (or Expression with resultType string). - :type max_partitions_number: any + :vartype max_partitions_number: any """ _attribute_map = { @@ -31232,6 +43389,22 @@ def __init__( self, **kwargs ): + """ + :keyword partition_column_name: The name of the column that will be used for proceeding range + partitioning. Type: string (or Expression with resultType string). + :paramtype partition_column_name: any + :keyword partition_upper_bound: The maximum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). + :paramtype partition_upper_bound: any + :keyword partition_lower_bound: The minimum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). 
+ :paramtype partition_lower_bound: any + :keyword max_partitions_number: The maximum value of partitions the table will be split into. + Type: integer (or Expression with resultType string). + :paramtype max_partitions_number: any + """ super(SapTablePartitionSettings, self).__init__(**kwargs) self.partition_column_name = kwargs.get('partition_column_name', None) self.partition_upper_bound = kwargs.get('partition_upper_bound', None) @@ -31244,31 +43417,31 @@ class SapTableResourceDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: Required. The name of the SAP Table. Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: Required. The name of the SAP Table. Type: string (or Expression with resultType string). - :type table_name: any + :vartype table_name: any """ _validation = { @@ -31294,6 +43467,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: Required. The name of the SAP Table. Type: string (or Expression with + resultType string). + :paramtype table_name: any + """ super(SapTableResourceDataset, self).__init__(**kwargs) self.type = 'SapTableResource' # type: str self.table_name = kwargs['table_name'] @@ -31304,58 +43502,58 @@ class SapTableSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param row_count: The number of rows to be retrieved. Type: integer(or Expression with + :vartype additional_columns: any + :ivar row_count: The number of rows to be retrieved. Type: integer(or Expression with resultType integer). - :type row_count: any - :param row_skips: The number of rows that will be skipped. Type: integer (or Expression with + :vartype row_count: any + :ivar row_skips: The number of rows that will be skipped. Type: integer (or Expression with resultType integer). - :type row_skips: any - :param rfc_table_fields: The fields of the SAP table that will be retrieved. 
For example, + :vartype row_skips: any + :ivar rfc_table_fields: The fields of the SAP table that will be retrieved. For example, column0, column1. Type: string (or Expression with resultType string). - :type rfc_table_fields: any - :param rfc_table_options: The options for the filtering of the SAP Table. For example, COLUMN0 + :vartype rfc_table_fields: any + :ivar rfc_table_options: The options for the filtering of the SAP Table. For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with resultType string). - :type rfc_table_options: any - :param batch_size: Specifies the maximum number of rows that will be retrieved at a time when + :vartype rfc_table_options: any + :ivar batch_size: Specifies the maximum number of rows that will be retrieved at a time when retrieving data from SAP Table. Type: integer (or Expression with resultType integer). - :type batch_size: any - :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that - will be used to read data from SAP Table. Type: string (or Expression with resultType string). - :type custom_rfc_read_table_function_module: any - :param sap_data_column_delimiter: The single character that will be used as delimiter passed to + :vartype batch_size: any + :ivar custom_rfc_read_table_function_module: Specifies the custom RFC function module that will + be used to read data from SAP Table. Type: string (or Expression with resultType string). + :vartype custom_rfc_read_table_function_module: any + :ivar sap_data_column_delimiter: The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). - :type sap_data_column_delimiter: any - :param partition_option: The partition mechanism that will be used for SAP table read in + :vartype sap_data_column_delimiter: any + :ivar partition_option: The partition mechanism that will be used for SAP table read in parallel. 
Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for SAP table source + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for SAP table source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.SapTablePartitionSettings + :vartype partition_settings: ~azure.mgmt.datafactory.models.SapTablePartitionSettings """ _validation = { @@ -31386,6 +43584,58 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :paramtype additional_columns: any + :keyword row_count: The number of rows to be retrieved. Type: integer(or Expression with + resultType integer). + :paramtype row_count: any + :keyword row_skips: The number of rows that will be skipped. Type: integer (or Expression with + resultType integer). + :paramtype row_skips: any + :keyword rfc_table_fields: The fields of the SAP table that will be retrieved. For example, + column0, column1. Type: string (or Expression with resultType string). + :paramtype rfc_table_fields: any + :keyword rfc_table_options: The options for the filtering of the SAP Table. For example, + COLUMN0 EQ SOME VALUE. Type: string (or Expression with resultType string). + :paramtype rfc_table_options: any + :keyword batch_size: Specifies the maximum number of rows that will be retrieved at a time when + retrieving data from SAP Table. Type: integer (or Expression with resultType integer). + :paramtype batch_size: any + :keyword custom_rfc_read_table_function_module: Specifies the custom RFC function module that + will be used to read data from SAP Table. Type: string (or Expression with resultType string). + :paramtype custom_rfc_read_table_function_module: any + :keyword sap_data_column_delimiter: The single character that will be used as delimiter passed + to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with + resultType string). + :paramtype sap_data_column_delimiter: any + :keyword partition_option: The partition mechanism that will be used for SAP table read in + parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", + "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for SAP table source + partitioning. 
+ :paramtype partition_settings: ~azure.mgmt.datafactory.models.SapTablePartitionSettings + """ super(SapTableSource, self).__init__(**kwargs) self.type = 'SapTableSource' # type: str self.row_count = kwargs.get('row_count', None) @@ -31406,22 +43656,22 @@ class ScheduleTrigger(MultiplePipelineTrigger): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Trigger type.Constant filled by server. + :vartype type: str + :ivar description: Trigger description. + :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[any] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param recurrence: Required. Recurrence schedule configuration. - :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence + :ivar annotations: List of tags that can be used for describing the trigger. + :vartype annotations: list[any] + :ivar pipelines: Pipelines that need to be started. + :vartype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :ivar recurrence: Required. Recurrence schedule configuration. 
+ :vartype recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence """ _validation = { @@ -31444,6 +43694,19 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Trigger description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the trigger. + :paramtype annotations: list[any] + :keyword pipelines: Pipelines that need to be started. + :paramtype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :keyword recurrence: Required. Recurrence schedule configuration. + :paramtype recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence + """ super(ScheduleTrigger, self).__init__(**kwargs) self.type = 'ScheduleTrigger' # type: str self.recurrence = kwargs['recurrence'] @@ -31452,22 +43715,22 @@ def __init__( class ScheduleTriggerRecurrence(msrest.serialization.Model): """The workflow trigger recurrence. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param frequency: The frequency. Possible values include: "NotSpecified", "Minute", "Hour", + :vartype additional_properties: dict[str, any] + :ivar frequency: The frequency. Possible values include: "NotSpecified", "Minute", "Hour", "Day", "Week", "Month", "Year". - :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency - :param interval: The interval. - :type interval: int - :param start_time: The start time. - :type start_time: ~datetime.datetime - :param end_time: The end time. - :type end_time: ~datetime.datetime - :param time_zone: The time zone. - :type time_zone: str - :param schedule: The recurrence schedule. 
- :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule + :vartype frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency + :ivar interval: The interval. + :vartype interval: int + :ivar start_time: The start time. + :vartype start_time: ~datetime.datetime + :ivar end_time: The end time. + :vartype end_time: ~datetime.datetime + :ivar time_zone: The time zone. + :vartype time_zone: str + :ivar schedule: The recurrence schedule. + :vartype schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule """ _attribute_map = { @@ -31484,6 +43747,24 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword frequency: The frequency. Possible values include: "NotSpecified", "Minute", "Hour", + "Day", "Week", "Month", "Year". + :paramtype frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency + :keyword interval: The interval. + :paramtype interval: int + :keyword start_time: The start time. + :paramtype start_time: ~datetime.datetime + :keyword end_time: The end time. + :paramtype end_time: ~datetime.datetime + :keyword time_zone: The time zone. + :paramtype time_zone: str + :keyword schedule: The recurrence schedule. + :paramtype schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule + """ super(ScheduleTriggerRecurrence, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.frequency = kwargs.get('frequency', None) @@ -31499,14 +43780,14 @@ class ScriptAction(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param name: Required. The user provided name of the script action. - :type name: str - :param uri: Required. The URI for the script action. - :type uri: str - :param roles: Required. The node types on which the script action should be executed. 
- :type roles: str - :param parameters: The parameters for the script action. - :type parameters: str + :ivar name: Required. The user provided name of the script action. + :vartype name: str + :ivar uri: Required. The URI for the script action. + :vartype uri: str + :ivar roles: Required. The node types on which the script action should be executed. + :vartype roles: str + :ivar parameters: The parameters for the script action. + :vartype parameters: str """ _validation = { @@ -31526,6 +43807,16 @@ def __init__( self, **kwargs ): + """ + :keyword name: Required. The user provided name of the script action. + :paramtype name: str + :keyword uri: Required. The URI for the script action. + :paramtype uri: str + :keyword roles: Required. The node types on which the script action should be executed. + :paramtype roles: str + :keyword parameters: The parameters for the script action. + :paramtype parameters: str + """ super(ScriptAction, self).__init__(**kwargs) self.name = kwargs['name'] self.uri = kwargs['uri'] @@ -31538,10 +43829,10 @@ class SecureString(SecretBase): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of the secret.Constant filled by server. - :type type: str - :param value: Required. Value of secure string. - :type value: str + :ivar type: Required. Type of the secret.Constant filled by server. + :vartype type: str + :ivar value: Required. Value of secure string. + :vartype value: str """ _validation = { @@ -31558,6 +43849,10 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. Value of secure string. + :paramtype value: str + """ super(SecureString, self).__init__(**kwargs) self.type = 'SecureString' # type: str self.value = kwargs['value'] @@ -31568,14 +43863,14 @@ class SelfDependencyTumblingWindowTriggerReference(DependencyReference): All required parameters must be populated in order to send to Azure. - :param type: Required. 
The type of dependency reference.Constant filled by server. - :type type: str - :param offset: Required. Timespan applied to the start time of a tumbling window when - evaluating dependency. - :type offset: str - :param size: The size of the window when evaluating the dependency. If undefined the frequency + :ivar type: Required. The type of dependency reference.Constant filled by server. + :vartype type: str + :ivar offset: Required. Timespan applied to the start time of a tumbling window when evaluating + dependency. + :vartype offset: str + :ivar size: The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used. - :type size: str + :vartype size: str """ _validation = { @@ -31594,6 +43889,14 @@ def __init__( self, **kwargs ): + """ + :keyword offset: Required. Timespan applied to the start time of a tumbling window when + evaluating dependency. + :paramtype offset: str + :keyword size: The size of the window when evaluating the dependency. If undefined the + frequency of the tumbling window will be used. + :paramtype size: str + """ super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) self.type = 'SelfDependencyTumblingWindowTriggerReference' # type: str self.offset = kwargs['offset'] @@ -31605,16 +43908,16 @@ class SelfHostedIntegrationRuntime(IntegrationRuntime): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of integration runtime.Constant filled by server. Possible values + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". 
- :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType - :param description: Integration runtime description. - :type description: str - :param linked_info: The base definition of a linked integration runtime. - :type linked_info: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType + :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType + :ivar description: Integration runtime description. + :vartype description: str + :ivar linked_info: The base definition of a linked integration runtime. + :vartype linked_info: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType """ _validation = { @@ -31632,6 +43935,15 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Integration runtime description. + :paramtype description: str + :keyword linked_info: The base definition of a linked integration runtime. + :paramtype linked_info: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType + """ super(SelfHostedIntegrationRuntime, self).__init__(**kwargs) self.type = 'SelfHosted' # type: str self.linked_info = kwargs.get('linked_info', None) @@ -31642,9 +43954,9 @@ class SelfHostedIntegrationRuntimeNode(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar node_name: Name of the integration runtime node. :vartype node_name: str :ivar machine_name: Machine name of the integration runtime node. 
@@ -31737,6 +44049,11 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.node_name = None @@ -31766,12 +44083,12 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of integration runtime.Constant filled by server. Possible values + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType + :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :ivar data_factory_name: The data factory name which the integration runtime belong to. :vartype data_factory_name: str :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped", @@ -31789,8 +44106,8 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode :ivar version: Version of the integration runtime. :vartype version: str - :param nodes: The list of nodes for this integration runtime. - :type nodes: list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] + :ivar nodes: The list of nodes for this integration runtime. 
+ :vartype nodes: list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] :ivar scheduled_update_date: The date at which the integration runtime will be scheduled to update, in ISO8601 format. :vartype scheduled_update_date: ~datetime.datetime @@ -31808,9 +44125,9 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): :vartype auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate :ivar version_status: Status of the integration runtime version. :vartype version_status: str - :param links: The list of linked integration runtimes that are created to share with this + :ivar links: The list of linked integration runtimes that are created to share with this integration runtime. - :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] + :vartype links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] :ivar pushed_version: The version that the integration runtime is going to update to. :vartype pushed_version: str :ivar latest_version: The latest version on download center. @@ -31867,6 +44184,16 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword nodes: The list of nodes for this integration runtime. + :paramtype nodes: list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] + :keyword links: The list of linked integration runtimes that are created to share with this + integration runtime. + :paramtype links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] + """ super(SelfHostedIntegrationRuntimeStatus, self).__init__(**kwargs) self.type = 'SelfHosted' # type: str self.create_time = None @@ -31892,49 +44219,50 @@ class ServiceNowLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param endpoint: Required. The endpoint of the ServiceNow server. (i.e. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar endpoint: Required. The endpoint of the ServiceNow server. (i.e. :code:``.service-now.com). - :type endpoint: any - :param authentication_type: Required. The authentication type to use. Possible values include: + :vartype endpoint: any + :ivar authentication_type: Required. The authentication type to use. Possible values include: "Basic", "OAuth2". 
- :type authentication_type: str or ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType - :param username: The user name used to connect to the ServiceNow server for Basic and OAuth2 + :vartype authentication_type: str or + ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType + :ivar username: The user name used to connect to the ServiceNow server for Basic and OAuth2 authentication. - :type username: any - :param password: The password corresponding to the user name for Basic and OAuth2 + :vartype username: any + :ivar password: The password corresponding to the user name for Basic and OAuth2 authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param client_id: The client id for OAuth2 authentication. - :type client_id: any - :param client_secret: The client secret for OAuth2 authentication. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar client_id: The client id for OAuth2 authentication. + :vartype client_id: any + :ivar client_secret: The client secret for OAuth2 authentication. + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
- :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -31966,6 +44294,50 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword endpoint: Required. The endpoint of the ServiceNow server. (i.e. + :code:``.service-now.com). + :paramtype endpoint: any + :keyword authentication_type: Required. The authentication type to use. Possible values + include: "Basic", "OAuth2". + :paramtype authentication_type: str or + ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType + :keyword username: The user name used to connect to the ServiceNow server for Basic and OAuth2 + authentication. 
+ :paramtype username: any + :keyword password: The password corresponding to the user name for Basic and OAuth2 + authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword client_id: The client id for OAuth2 authentication. + :paramtype client_id: any + :keyword client_secret: The client secret for OAuth2 authentication. + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(ServiceNowLinkedService, self).__init__(**kwargs) self.type = 'ServiceNow' # type: str self.endpoint = kwargs['endpoint'] @@ -31985,30 +44357,30 @@ class ServiceNowObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -32033,6 +44405,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(ServiceNowObjectDataset, self).__init__(**kwargs) self.type = 'ServiceNowObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -32043,32 +44439,32 @@ class ServiceNowSource(TabularSource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -32091,6 +44487,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(ServiceNowSource, self).__init__(**kwargs) self.type = 'ServiceNowSource' # type: str self.query = kwargs.get('query', None) @@ -32101,21 +44523,21 @@ class ServicePrincipalCredential(Credential): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of credential.Constant filled by server. - :type type: str - :param description: Credential description. - :type description: str - :param annotations: List of tags that can be used for describing the Credential. - :type annotations: list[any] - :param service_principal_id: The app ID of the service principal used to authenticate. - :type service_principal_id: any - :param service_principal_key: The key of the service principal used to authenticate. - :type service_principal_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param tenant: The ID of the tenant to which the service principal belongs. - :type tenant: any + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of credential.Constant filled by server. + :vartype type: str + :ivar description: Credential description. + :vartype description: str + :ivar annotations: List of tags that can be used for describing the Credential. + :vartype annotations: list[any] + :ivar service_principal_id: The app ID of the service principal used to authenticate. 
+ :vartype service_principal_id: any + :ivar service_principal_key: The key of the service principal used to authenticate. + :vartype service_principal_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar tenant: The ID of the tenant to which the service principal belongs. + :vartype tenant: any """ _validation = { @@ -32136,6 +44558,21 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Credential description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the Credential. + :paramtype annotations: list[any] + :keyword service_principal_id: The app ID of the service principal used to authenticate. + :paramtype service_principal_id: any + :keyword service_principal_key: The key of the service principal used to authenticate. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword tenant: The ID of the tenant to which the service principal belongs. + :paramtype tenant: any + """ super(ServicePrincipalCredential, self).__init__(**kwargs) self.type = 'ServicePrincipal' # type: str self.service_principal_id = kwargs.get('service_principal_id', None) @@ -32148,23 +44585,23 @@ class SetVariableActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. 
- :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param variable_name: Name of the variable whose value needs to be set. - :type variable_name: str - :param value: Value to be set. Could be a static value or Expression. - :type value: any + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar variable_name: Name of the variable whose value needs to be set. + :vartype variable_name: str + :ivar value: Value to be set. Could be a static value or Expression. + :vartype value: any """ _validation = { @@ -32187,6 +44624,23 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword variable_name: Name of the variable whose value needs to be set. + :paramtype variable_name: str + :keyword value: Value to be set. 
Could be a static value or Expression. + :paramtype value: any + """ super(SetVariableActivity, self).__init__(**kwargs) self.type = 'SetVariable' # type: str self.variable_name = kwargs.get('variable_name', None) @@ -32198,17 +44652,17 @@ class SftpLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any + :vartype file_name: any """ _validation = { @@ -32226,6 +44680,17 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). 
+ :paramtype file_name: any + """ super(SftpLocation, self).__init__(**kwargs) self.type = 'SftpLocation' # type: str @@ -32235,47 +44700,47 @@ class SftpReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or Expression with + :vartype recursive: any + :ivar wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: Sftp wildcardFileName. 
Type: string (or Expression with resultType + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype wildcard_file_name: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype partition_root_path: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype file_list_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. 
Type: string (or Expression with resultType string). - :type modified_datetime_end: any - :param disable_chunking: If true, disable parallel reading within each file. Default is false. + :vartype modified_datetime_end: any + :ivar disable_chunking: If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_chunking: any + :vartype disable_chunking: any """ _validation = { @@ -32303,6 +44768,47 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or Expression with + resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType + string). + :paramtype wildcard_file_name: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). 
+ :paramtype partition_root_path: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + :keyword disable_chunking: If true, disable parallel reading within each file. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_chunking: any + """ super(SftpReadSettings, self).__init__(**kwargs) self.type = 'SftpReadSettings' # type: str self.recursive = kwargs.get('recursive', None) @@ -32322,56 +44828,56 @@ class SftpServerLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The SFTP server host name. Type: string (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The SFTP server host name. Type: string (or Expression with resultType string). - :type host: any - :param port: The TCP port number that the SFTP server uses to listen for client connections. + :vartype host: any + :ivar port: The TCP port number that the SFTP server uses to listen for client connections. Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: any - :param authentication_type: The authentication type to be used to connect to the FTP server. + :vartype port: any + :ivar authentication_type: The authentication type to be used to connect to the FTP server. Possible values include: "Basic", "SshPublicKey", "MultiFactor". - :type authentication_type: str or ~azure.mgmt.datafactory.models.SftpAuthenticationType - :param user_name: The username used to log on to the SFTP server. Type: string (or Expression + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.SftpAuthenticationType + :ivar user_name: The username used to log on to the SFTP server. 
Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password to logon the SFTP server for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype user_name: any + :ivar password: Password to logon the SFTP server for Basic authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param private_key_path: The SSH private key file path for SshPublicKey authentication. Only + :vartype encrypted_credential: any + :ivar private_key_path: The SSH private key file path for SshPublicKey authentication. Only valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression with resultType string). - :type private_key_path: any - :param private_key_content: Base64 encoded SSH private key content for SshPublicKey + :vartype private_key_path: any + :ivar private_key_content: Base64 encoded SSH private key content for SshPublicKey authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. - :type private_key_content: ~azure.mgmt.datafactory.models.SecretBase - :param pass_phrase: The password to decrypt the SSH private key if the SSH private key is + :vartype private_key_content: ~azure.mgmt.datafactory.models.SecretBase + :ivar pass_phrase: The password to decrypt the SSH private key if the SSH private key is encrypted. 
- :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase - :param skip_host_key_validation: If true, skip the SSH host key validation. Default value is + :vartype pass_phrase: ~azure.mgmt.datafactory.models.SecretBase + :ivar skip_host_key_validation: If true, skip the SSH host key validation. Default value is false. Type: boolean (or Expression with resultType boolean). - :type skip_host_key_validation: any - :param host_key_fingerprint: The host key finger-print of the SFTP server. When + :vartype skip_host_key_validation: any + :ivar host_key_fingerprint: The host key finger-print of the SFTP server. When SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or Expression with resultType string). - :type host_key_fingerprint: any + :vartype host_key_fingerprint: any """ _validation = { @@ -32403,6 +44909,56 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The SFTP server host name. Type: string (or Expression with resultType + string). + :paramtype host: any + :keyword port: The TCP port number that the SFTP server uses to listen for client connections. + Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. + :paramtype port: any + :keyword authentication_type: The authentication type to be used to connect to the FTP server. 
+ Possible values include: "Basic", "SshPublicKey", "MultiFactor". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.SftpAuthenticationType + :keyword user_name: The username used to log on to the SFTP server. Type: string (or Expression + with resultType string). + :paramtype user_name: any + :keyword password: Password to logon the SFTP server for Basic authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword private_key_path: The SSH private key file path for SshPublicKey authentication. Only + valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either + PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH + format. Type: string (or Expression with resultType string). + :paramtype private_key_path: any + :keyword private_key_content: Base64 encoded SSH private key content for SshPublicKey + authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or + PrivateKeyContent should be specified. SSH private key should be OpenSSH format. + :paramtype private_key_content: ~azure.mgmt.datafactory.models.SecretBase + :keyword pass_phrase: The password to decrypt the SSH private key if the SSH private key is + encrypted. + :paramtype pass_phrase: ~azure.mgmt.datafactory.models.SecretBase + :keyword skip_host_key_validation: If true, skip the SSH host key validation. Default value is + false. Type: boolean (or Expression with resultType boolean). + :paramtype skip_host_key_validation: any + :keyword host_key_fingerprint: The host key finger-print of the SFTP server. When + SkipHostKeyValidation is false, HostKeyFingerprint should be specified. 
Type: string (or + Expression with resultType string). + :paramtype host_key_fingerprint: any + """ super(SftpServerLinkedService, self).__init__(**kwargs) self.type = 'Sftp' # type: str self.host = kwargs['host'] @@ -32423,26 +44979,26 @@ class SftpWriteSettings(StoreWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any - :param operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any + :ivar operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). 
- :type operation_timeout: any - :param use_temp_file_rename: Upload to temporary file(s) and rename. Disable this option if - your SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType + :vartype operation_timeout: any + :ivar use_temp_file_rename: Upload to temporary file(s) and rename. Disable this option if your + SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType boolean). - :type use_temp_file_rename: any + :vartype use_temp_file_rename: any """ _validation = { @@ -32463,6 +45019,26 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. + :paramtype copy_behavior: any + :keyword operation_timeout: Specifies the timeout for writing each chunk to SFTP server. + Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). + :paramtype operation_timeout: any + :keyword use_temp_file_rename: Upload to temporary file(s) and rename. Disable this option if + your SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType + boolean). 
+ :paramtype use_temp_file_rename: any + """ super(SftpWriteSettings, self).__init__(**kwargs) self.type = 'SftpWriteSettings' # type: str self.operation_timeout = kwargs.get('operation_timeout', None) @@ -32474,38 +45050,38 @@ class SharePointOnlineListLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param site_url: Required. The URL of the SharePoint Online site. For example, + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar site_url: Required. The URL of the SharePoint Online site. For example, https://contoso.sharepoint.com/sites/siteName. 
Type: string (or Expression with resultType string). - :type site_url: any - :param tenant_id: Required. The tenant ID under which your application resides. You can find it + :vartype site_url: any + :ivar tenant_id: Required. The tenant ID under which your application resides. You can find it from Azure portal Active Directory overview page. Type: string (or Expression with resultType string). - :type tenant_id: any - :param service_principal_id: Required. The application (client) ID of your application + :vartype tenant_id: any + :ivar service_principal_id: Required. The application (client) ID of your application registered in Azure Active Directory. Make sure to grant SharePoint site permission to this application. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: Required. The client secret of your application registered in + :vartype service_principal_id: any + :ivar service_principal_key: Required. The client secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -32534,6 +45110,38 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword site_url: Required. The URL of the SharePoint Online site. For example, + https://contoso.sharepoint.com/sites/siteName. Type: string (or Expression with resultType + string). + :paramtype site_url: any + :keyword tenant_id: Required. The tenant ID under which your application resides. You can find + it from Azure portal Active Directory overview page. Type: string (or Expression with + resultType string). + :paramtype tenant_id: any + :keyword service_principal_id: Required. The application (client) ID of your application + registered in Azure Active Directory. Make sure to grant SharePoint site permission to this + application. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: Required. The client secret of your application registered in + Azure Active Directory. Type: string (or Expression with resultType string). + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(SharePointOnlineListLinkedService, self).__init__(**kwargs) self.type = 'SharePointOnlineList' # type: str self.site_url = kwargs['site_url'] @@ -32548,31 +45156,31 @@ class SharePointOnlineListResourceDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param list_name: The name of the SharePoint Online list. Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar list_name: The name of the SharePoint Online list. Type: string (or Expression with resultType string). - :type list_name: any + :vartype list_name: any """ _validation = { @@ -32597,6 +45205,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword list_name: The name of the SharePoint Online list. Type: string (or Expression with + resultType string). + :paramtype list_name: any + """ super(SharePointOnlineListResourceDataset, self).__init__(**kwargs) self.type = 'SharePointOnlineListResource' # type: str self.list_name = kwargs.get('list_name', None) @@ -32607,30 +45240,30 @@ class SharePointOnlineListSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: The OData query to filter the data in SharePoint Online list. For example, + :vartype disable_metrics_collection: any + :ivar query: The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: any - :param http_request_timeout: The wait time to get a response from SharePoint Online. Default + :vartype query: any + :ivar http_request_timeout: The wait time to get a response from SharePoint Online. Default value is 5 minutes (00:05:00). Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: any + :vartype http_request_timeout: any """ _validation = { @@ -32652,6 +45285,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype disable_metrics_collection: any + :keyword query: The OData query to filter the data in SharePoint Online list. For example, + "$top=1". Type: string (or Expression with resultType string). + :paramtype query: any + :keyword http_request_timeout: The wait time to get a response from SharePoint Online. Default + value is 5 minutes (00:05:00). Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype http_request_timeout: any + """ super(SharePointOnlineListSource, self).__init__(**kwargs) self.type = 'SharePointOnlineListSource' # type: str self.query = kwargs.get('query', None) @@ -32663,38 +45320,38 @@ class ShopifyLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The endpoint of the Shopify server. (i.e. mystore.myshopify.com). - :type host: any - :param access_token: The API access token that can be used to access Shopify’s data. The token + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. 
+ :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The endpoint of the Shopify server. (i.e. mystore.myshopify.com). + :vartype host: any + :ivar access_token: The API access token that can be used to access Shopify’s data. The token won't expire if it is offline mode. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -32721,6 +45378,38 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The endpoint of the Shopify server. (i.e. mystore.myshopify.com). + :paramtype host: any + :keyword access_token: The API access token that can be used to access Shopify’s data. The + token won't expire if it is offline mode. + :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. 
+ :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(ShopifyLinkedService, self).__init__(**kwargs) self.type = 'Shopify' # type: str self.host = kwargs['host'] @@ -32736,30 +45425,30 @@ class ShopifyObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -32784,6 +45473,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(ShopifyObjectDataset, self).__init__(**kwargs) self.type = 'ShopifyObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -32794,32 +45507,32 @@ class ShopifySource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -32842,6 +45555,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(ShopifySource, self).__init__(**kwargs) self.type = 'ShopifySource' # type: str self.query = kwargs.get('query', None) @@ -32850,12 +45589,12 @@ def __init__( class SkipErrorFile(msrest.serialization.Model): """Skip error file. - :param file_missing: Skip if file is deleted by other client during copy. Default is true. - Type: boolean (or Expression with resultType boolean). - :type file_missing: any - :param data_inconsistency: Skip if source/sink file changed by other concurrent write. Default + :ivar file_missing: Skip if file is deleted by other client during copy. Default is true. Type: + boolean (or Expression with resultType boolean). + :vartype file_missing: any + :ivar data_inconsistency: Skip if source/sink file changed by other concurrent write. Default is false. 
Type: boolean (or Expression with resultType boolean). - :type data_inconsistency: any + :vartype data_inconsistency: any """ _attribute_map = { @@ -32867,6 +45606,14 @@ def __init__( self, **kwargs ): + """ + :keyword file_missing: Skip if file is deleted by other client during copy. Default is true. + Type: boolean (or Expression with resultType boolean). + :paramtype file_missing: any + :keyword data_inconsistency: Skip if source/sink file changed by other concurrent write. + Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype data_inconsistency: any + """ super(SkipErrorFile, self).__init__(**kwargs) self.file_missing = kwargs.get('file_missing', None) self.data_inconsistency = kwargs.get('data_inconsistency', None) @@ -32877,34 +45624,34 @@ class SnowflakeDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param schema_type_properties_schema: The schema name of the Snowflake database. Type: string + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar schema_type_properties_schema: The schema name of the Snowflake database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The table name of the Snowflake database. Type: string (or Expression with + :vartype schema_type_properties_schema: any + :ivar table: The table name of the Snowflake database. Type: string (or Expression with resultType string). 
- :type table: any + :vartype table: any """ _validation = { @@ -32930,6 +45677,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword schema_type_properties_schema: The schema name of the Snowflake database. Type: string + (or Expression with resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The table name of the Snowflake database. Type: string (or Expression with + resultType string). + :paramtype table: any + """ super(SnowflakeDataset, self).__init__(**kwargs) self.type = 'SnowflakeTable' # type: str self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -32941,21 +45716,21 @@ class SnowflakeExportCopyCommand(ExportSettings): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The export setting type.Constant filled by server. - :type type: str - :param additional_copy_options: Additional copy options directly passed to snowflake Copy + :vartype additional_properties: dict[str, any] + :ivar type: Required. The export setting type.Constant filled by server. + :vartype type: str + :ivar additional_copy_options: Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" }. - :type additional_copy_options: dict[str, any] - :param additional_format_options: Additional format options directly passed to snowflake Copy + :vartype additional_copy_options: dict[str, any] + :ivar additional_format_options: Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" }. - :type additional_format_options: dict[str, any] + :vartype additional_format_options: dict[str, any] """ _validation = { @@ -32973,6 +45748,21 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword additional_copy_options: Additional copy options directly passed to snowflake Copy + Command. Type: key value pairs (value should be string type) (or Expression with resultType + object). 
Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": + "'HH24:MI:SS.FF'" }. + :paramtype additional_copy_options: dict[str, any] + :keyword additional_format_options: Additional format options directly passed to snowflake Copy + Command. Type: key value pairs (value should be string type) (or Expression with resultType + object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" + }. + :paramtype additional_format_options: dict[str, any] + """ super(SnowflakeExportCopyCommand, self).__init__(**kwargs) self.type = 'SnowflakeExportCopyCommand' # type: str self.additional_copy_options = kwargs.get('additional_copy_options', None) @@ -32984,21 +45774,21 @@ class SnowflakeImportCopyCommand(ImportSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The import setting type.Constant filled by server. - :type type: str - :param additional_copy_options: Additional copy options directly passed to snowflake Copy + :vartype additional_properties: dict[str, any] + :ivar type: Required. The import setting type.Constant filled by server. + :vartype type: str + :ivar additional_copy_options: Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" }. 
- :type additional_copy_options: dict[str, any] - :param additional_format_options: Additional format options directly passed to snowflake Copy + :vartype additional_copy_options: dict[str, any] + :ivar additional_format_options: Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": "'FALSE'" }. - :type additional_format_options: dict[str, any] + :vartype additional_format_options: dict[str, any] """ _validation = { @@ -33016,6 +45806,21 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword additional_copy_options: Additional copy options directly passed to snowflake Copy + Command. Type: key value pairs (value should be string type) (or Expression with resultType + object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": + "'HH24:MI:SS.FF'" }. + :paramtype additional_copy_options: dict[str, any] + :keyword additional_format_options: Additional format options directly passed to snowflake Copy + Command. Type: key value pairs (value should be string type) (or Expression with resultType + object). Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": + "'FALSE'" }. + :paramtype additional_format_options: dict[str, any] + """ super(SnowflakeImportCopyCommand, self).__init__(**kwargs) self.type = 'SnowflakeImportCopyCommand' # type: str self.additional_copy_options = kwargs.get('additional_copy_options', None) @@ -33027,28 +45832,28 @@ class SnowflakeLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string of snowflake. Type: string, + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string of snowflake. Type: string, SecureString. - :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -33072,6 +45877,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string of snowflake. Type: string, + SecureString. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. + :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(SnowflakeLinkedService, self).__init__(**kwargs) self.type = 'Snowflake' # type: str self.connection_string = kwargs['connection_string'] @@ -33084,34 +45911,34 @@ class SnowflakeSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + :vartype disable_metrics_collection: any + :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: any - :param import_settings: Snowflake import settings. - :type import_settings: ~azure.mgmt.datafactory.models.SnowflakeImportCopyCommand + :vartype pre_copy_script: any + :ivar import_settings: Snowflake import settings. + :vartype import_settings: ~azure.mgmt.datafactory.models.SnowflakeImportCopyCommand """ _validation = { @@ -33135,6 +45962,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :paramtype pre_copy_script: any + :keyword import_settings: Snowflake import settings. + :paramtype import_settings: ~azure.mgmt.datafactory.models.SnowflakeImportCopyCommand + """ super(SnowflakeSink, self).__init__(**kwargs) self.type = 'SnowflakeSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) @@ -33146,27 +46001,27 @@ class SnowflakeSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: Snowflake Sql query. Type: string (or Expression with resultType string). - :type query: any - :param export_settings: Snowflake export settings. - :type export_settings: ~azure.mgmt.datafactory.models.SnowflakeExportCopyCommand + :vartype disable_metrics_collection: any + :ivar query: Snowflake Sql query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar export_settings: Snowflake export settings. + :vartype export_settings: ~azure.mgmt.datafactory.models.SnowflakeExportCopyCommand """ _validation = { @@ -33188,6 +46043,27 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: Snowflake Sql query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword export_settings: Snowflake export settings. + :paramtype export_settings: ~azure.mgmt.datafactory.models.SnowflakeExportCopyCommand + """ super(SnowflakeSource, self).__init__(**kwargs) self.type = 'SnowflakeSource' # type: str self.query = kwargs.get('query', None) @@ -33199,62 +46075,61 @@ class SparkLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. IP address or host name of the Spark server. - :type host: any - :param port: Required. 
The TCP port that the Spark server uses to listen for client - connections. - :type port: any - :param server_type: The type of Spark server. Possible values include: "SharkServer", + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. IP address or host name of the Spark server. + :vartype host: any + :ivar port: Required. The TCP port that the Spark server uses to listen for client connections. + :vartype port: any + :ivar server_type: The type of Spark server. Possible values include: "SharkServer", "SharkServer2", "SparkThriftServer". - :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType - :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible + :vartype server_type: str or ~azure.mgmt.datafactory.models.SparkServerType + :ivar thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible values include: "Binary", "SASL", "HTTP ". - :type thrift_transport_protocol: str or + :vartype thrift_transport_protocol: str or ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol - :param authentication_type: Required. The authentication method used to access the Spark - server. Possible values include: "Anonymous", "Username", "UsernameAndPassword", + :ivar authentication_type: Required. 
The authentication method used to access the Spark server. + Possible values include: "Anonymous", "Username", "UsernameAndPassword", "WindowsAzureHDInsightService". - :type authentication_type: str or ~azure.mgmt.datafactory.models.SparkAuthenticationType - :param username: The user name that you use to access Spark Server. - :type username: any - :param password: The password corresponding to the user name that you provided in the Username + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.SparkAuthenticationType + :ivar username: The user name that you use to access Spark Server. + :vartype username: any + :ivar password: The password corresponding to the user name that you provided in the Username field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param http_path: The partial URL corresponding to the Spark server. - :type http_path: any - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar http_path: The partial URL corresponding to the Spark server. + :vartype http_path: any + :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :type enable_ssl: any - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + :vartype enable_ssl: any + :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: any - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + :vartype trusted_cert_path: any + :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. 
The default value is false. - :type use_system_trust_store: any - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + :vartype use_system_trust_store: any + :ivar allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: any - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + :vartype allow_host_name_cn_mismatch: any + :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype allow_self_signed_server_cert: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -33291,6 +46166,62 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. IP address or host name of the Spark server. 
+ :paramtype host: any + :keyword port: Required. The TCP port that the Spark server uses to listen for client + connections. + :paramtype port: any + :keyword server_type: The type of Spark server. Possible values include: "SharkServer", + "SharkServer2", "SparkThriftServer". + :paramtype server_type: str or ~azure.mgmt.datafactory.models.SparkServerType + :keyword thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible + values include: "Binary", "SASL", "HTTP ". + :paramtype thrift_transport_protocol: str or + ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol + :keyword authentication_type: Required. The authentication method used to access the Spark + server. Possible values include: "Anonymous", "Username", "UsernameAndPassword", + "WindowsAzureHDInsightService". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.SparkAuthenticationType + :keyword username: The user name that you use to access Spark Server. + :paramtype username: any + :keyword password: The password corresponding to the user name that you provided in the + Username field. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword http_path: The partial URL corresponding to the Spark server. + :paramtype http_path: any + :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. + The default value is false. + :paramtype enable_ssl: any + :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates + for verifying the server when connecting over SSL. This property can only be set when using SSL + on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :paramtype trusted_cert_path: any + :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system + trust store or from a specified PEM file. The default value is false. 
+ :paramtype use_system_trust_store: any + :keyword allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :paramtype allow_host_name_cn_mismatch: any + :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates + from the server. The default value is false. + :paramtype allow_self_signed_server_cert: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(SparkLinkedService, self).__init__(**kwargs) self.type = 'Spark' # type: str self.host = kwargs['host'] @@ -33314,36 +46245,36 @@ class SparkObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The table name of the Spark. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The schema name of the Spark. Type: string (or Expression + :vartype table_name: any + :ivar table: The table name of the Spark. Type: string (or Expression with resultType string). 
+ :vartype table: any + :ivar schema_type_properties_schema: The schema name of the Spark. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -33370,6 +46301,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of the Spark. Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of the Spark. Type: string (or + Expression with resultType string). 
+ :paramtype schema_type_properties_schema: any + """ super(SparkObjectDataset, self).__init__(**kwargs) self.type = 'SparkObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -33382,32 +46344,32 @@ class SparkSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -33430,6 +46392,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(SparkSource, self).__init__(**kwargs) self.type = 'SparkSource' # type: str self.query = kwargs.get('query', None) @@ -33440,19 +46428,19 @@ class SqlAlwaysEncryptedProperties(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param always_encrypted_akv_auth_type: Required. Sql always encrypted AKV authentication type. + :ivar always_encrypted_akv_auth_type: Required. Sql always encrypted AKV authentication type. Type: string (or Expression with resultType string). Possible values include: "ServicePrincipal", "ManagedIdentity", "UserAssignedManagedIdentity". - :type always_encrypted_akv_auth_type: str or + :vartype always_encrypted_akv_auth_type: str or ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedAkvAuthType - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Azure Key Vault authentication. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The key of the service principal used to authenticate against + :ivar service_principal_id: The client ID of the application in Azure Active Directory used for + Azure Key Vault authentication. Type: string (or Expression with resultType string). 
+ :vartype service_principal_id: any + :ivar service_principal_key: The key of the service principal used to authenticate against Azure Key Vault. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -33470,6 +46458,21 @@ def __init__( self, **kwargs ): + """ + :keyword always_encrypted_akv_auth_type: Required. Sql always encrypted AKV authentication + type. Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipal", "ManagedIdentity", "UserAssignedManagedIdentity". + :paramtype always_encrypted_akv_auth_type: str or + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedAkvAuthType + :keyword service_principal_id: The client ID of the application in Azure Active Directory used + for Azure Key Vault authentication. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The key of the service principal used to authenticate against + Azure Key Vault. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(SqlAlwaysEncryptedProperties, self).__init__(**kwargs) self.always_encrypted_akv_auth_type = kwargs['always_encrypted_akv_auth_type'] self.service_principal_id = kwargs.get('service_principal_id', None) @@ -33482,54 +46485,54 @@ class SqlDWSink(CopySink): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + :vartype disable_metrics_collection: any + :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: any - :param allow_poly_base: Indicates to use PolyBase to copy data into SQL Data Warehouse when + :vartype pre_copy_script: any + :ivar allow_poly_base: Indicates to use PolyBase to copy data into SQL Data Warehouse when applicable. Type: boolean (or Expression with resultType boolean). - :type allow_poly_base: any - :param poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. - :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings - :param allow_copy_command: Indicates to use Copy Command to copy data into SQL Data Warehouse. + :vartype allow_poly_base: any + :ivar poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. + :vartype poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings + :ivar allow_copy_command: Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType boolean). - :type allow_copy_command: any - :param copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is + :vartype allow_copy_command: any + :ivar copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is true. - :type copy_command_settings: ~azure.mgmt.datafactory.models.DWCopyCommandSettings - :param table_option: The option to handle sink table, such as autoCreate. 
For now only + :vartype copy_command_settings: ~azure.mgmt.datafactory.models.DWCopyCommandSettings + :ivar table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: any - :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + :vartype table_option: any + :ivar sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :type sql_writer_use_table_lock: any - :param write_behavior: Write behavior when copying data into azure SQL DW. Type: + :vartype sql_writer_use_table_lock: any + :ivar write_behavior: Write behavior when copying data into azure SQL DW. Type: SqlDWWriteBehaviorEnum (or Expression with resultType SqlDWWriteBehaviorEnum). - :type write_behavior: any - :param upsert_settings: SQL DW upsert settings. - :type upsert_settings: ~azure.mgmt.datafactory.models.SqlDWUpsertSettings + :vartype write_behavior: any + :ivar upsert_settings: SQL DW upsert settings. + :vartype upsert_settings: ~azure.mgmt.datafactory.models.SqlDWUpsertSettings """ _validation = { @@ -33560,6 +46563,54 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :paramtype pre_copy_script: any + :keyword allow_poly_base: Indicates to use PolyBase to copy data into SQL Data Warehouse when + applicable. Type: boolean (or Expression with resultType boolean). + :paramtype allow_poly_base: any + :keyword poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. + :paramtype poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings + :keyword allow_copy_command: Indicates to use Copy Command to copy data into SQL Data + Warehouse. Type: boolean (or Expression with resultType boolean). + :paramtype allow_copy_command: any + :keyword copy_command_settings: Specifies Copy Command related settings when allowCopyCommand + is true. + :paramtype copy_command_settings: ~azure.mgmt.datafactory.models.DWCopyCommandSettings + :keyword table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :paramtype table_option: any + :keyword sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean + (or Expression with resultType boolean). + :paramtype sql_writer_use_table_lock: any + :keyword write_behavior: Write behavior when copying data into azure SQL DW. 
Type: + SqlDWWriteBehaviorEnum (or Expression with resultType SqlDWWriteBehaviorEnum). + :paramtype write_behavior: any + :keyword upsert_settings: SQL DW upsert settings. + :paramtype upsert_settings: ~azure.mgmt.datafactory.models.SqlDWUpsertSettings + """ super(SqlDWSink, self).__init__(**kwargs) self.type = 'SqlDWSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) @@ -33578,45 +46629,45 @@ class SqlDWSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with + :vartype additional_columns: any + :ivar sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: any - :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Data Warehouse + :vartype sql_reader_query: any + :ivar sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Data Warehouse source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: any - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + :vartype sql_reader_stored_procedure_name: any + :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. 
- :type stored_procedure_parameters: any - :param partition_option: The partition mechanism that will be used for Sql read in parallel. + :vartype stored_procedure_parameters: any + :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. + :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -33643,6 +46694,45 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with + resultType string). + :paramtype sql_reader_query: any + :keyword sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Data + Warehouse source. This cannot be used at the same time as SqlReaderQuery. Type: string (or + Expression with resultType string). + :paramtype sql_reader_stored_procedure_name: any + :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType + object), itemType: StoredProcedureParameter. + :paramtype stored_procedure_parameters: any + :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. + :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + """ super(SqlDWSource, self).__init__(**kwargs) self.type = 'SqlDWSource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) @@ -33655,12 +46745,12 @@ def __init__( class SqlDWUpsertSettings(msrest.serialization.Model): """Sql DW upsert option settings. - :param interim_schema_name: Schema name for interim table. Type: string (or Expression with + :ivar interim_schema_name: Schema name for interim table. Type: string (or Expression with resultType string). - :type interim_schema_name: any - :param keys: Key column names for unique row identification. 
Type: array of strings (or + :vartype interim_schema_name: any + :ivar keys: Key column names for unique row identification. Type: array of strings (or Expression with resultType array of strings). - :type keys: any + :vartype keys: any """ _attribute_map = { @@ -33672,6 +46762,14 @@ def __init__( self, **kwargs ): + """ + :keyword interim_schema_name: Schema name for interim table. Type: string (or Expression with + resultType string). + :paramtype interim_schema_name: any + :keyword keys: Key column names for unique row identification. Type: array of strings (or + Expression with resultType array of strings). + :paramtype keys: any + """ super(SqlDWUpsertSettings, self).__init__(**kwargs) self.interim_schema_name = kwargs.get('interim_schema_name', None) self.keys = kwargs.get('keys', None) @@ -33682,55 +46780,55 @@ class SqlMISink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + :vartype disable_metrics_collection: any + :ivar sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: any - :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with - resultType string). - :type sql_writer_table_type: any - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + :vartype sql_writer_stored_procedure_name: any + :ivar sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :type pre_copy_script: any - :param stored_procedure_parameters: SQL stored procedure parameters. 
- :type stored_procedure_parameters: dict[str, + :vartype sql_writer_table_type: any + :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :vartype pre_copy_script: any + :ivar stored_procedure_parameters: SQL stored procedure parameters. + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + :ivar stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :type stored_procedure_table_type_parameter_name: any - :param table_option: The option to handle sink table, such as autoCreate. For now only + :vartype stored_procedure_table_type_parameter_name: any + :ivar table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: any - :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + :vartype table_option: any + :ivar sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :type sql_writer_use_table_lock: any - :param write_behavior: White behavior when copying data into azure SQL MI. Type: + :vartype sql_writer_use_table_lock: any + :ivar write_behavior: White behavior when copying data into azure SQL MI. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). - :type write_behavior: any - :param upsert_settings: SQL upsert settings. - :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings + :vartype write_behavior: any + :ivar upsert_settings: SQL upsert settings. 
+ :vartype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ _validation = { @@ -33761,6 +46859,55 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :paramtype sql_writer_stored_procedure_name: any + :keyword sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :paramtype sql_writer_table_type: any + :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :paramtype pre_copy_script: any + :keyword stored_procedure_parameters: SQL stored procedure parameters. 
+ :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :keyword stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :paramtype stored_procedure_table_type_parameter_name: any + :keyword table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :paramtype table_option: any + :keyword sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean + (or Expression with resultType boolean). + :paramtype sql_writer_use_table_lock: any + :keyword write_behavior: White behavior when copying data into azure SQL MI. Type: + SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :paramtype write_behavior: any + :keyword upsert_settings: SQL upsert settings. + :paramtype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings + """ super(SqlMISink, self).__init__(**kwargs) self.type = 'SqlMISink' # type: str self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) @@ -33779,46 +46926,46 @@ class SqlMISource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. 
Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
- :type sql_reader_query: any - :param sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed + :vartype additional_columns: any + :ivar sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :vartype sql_reader_query: any + :ivar sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: any - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + :vartype sql_reader_stored_procedure_name: any + :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: any - :param partition_option: The partition mechanism that will be used for Sql read in parallel. + :ivar produce_additional_types: Which additional types to produce. + :vartype produce_additional_types: any + :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. 
+ :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -33846,6 +46993,47 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword sql_reader_query: SQL reader query. Type: string (or Expression with resultType + string). + :paramtype sql_reader_query: any + :keyword sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed + Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or + Expression with resultType string). 
+ :paramtype sql_reader_stored_procedure_name: any + :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :keyword produce_additional_types: Which additional types to produce. + :paramtype produce_additional_types: any + :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. + :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + """ super(SqlMISource, self).__init__(**kwargs) self.type = 'SqlMISource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) @@ -33859,21 +47047,21 @@ def __init__( class SqlPartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for Sql source partitioning. - :param partition_column_name: The name of the column in integer or datetime type that will be + :ivar partition_column_name: The name of the column in integer or datetime type that will be used for proceeding partitioning. If not specified, the primary key of the table is auto-detected and used as the partition column. Type: string (or Expression with resultType string). - :type partition_column_name: any - :param partition_upper_bound: The maximum value of the partition column for partition range + :vartype partition_column_name: any + :ivar partition_upper_bound: The maximum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). 
- :type partition_upper_bound: any - :param partition_lower_bound: The minimum value of the partition column for partition range + :vartype partition_upper_bound: any + :ivar partition_lower_bound: The minimum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). - :type partition_lower_bound: any + :vartype partition_lower_bound: any """ _attribute_map = { @@ -33886,6 +47074,23 @@ def __init__( self, **kwargs ): + """ + :keyword partition_column_name: The name of the column in integer or datetime type that will be + used for proceeding partitioning. If not specified, the primary key of the table is + auto-detected and used as the partition column. Type: string (or Expression with resultType + string). + :paramtype partition_column_name: any + :keyword partition_upper_bound: The maximum value of the partition column for partition range + splitting. This value is used to decide the partition stride, not for filtering the rows in + table. All rows in the table or query result will be partitioned and copied. Type: string (or + Expression with resultType string). + :paramtype partition_upper_bound: any + :keyword partition_lower_bound: The minimum value of the partition column for partition range + splitting. This value is used to decide the partition stride, not for filtering the rows in + table. All rows in the table or query result will be partitioned and copied. Type: string (or + Expression with resultType string). 
+ :paramtype partition_lower_bound: any + """ super(SqlPartitionSettings, self).__init__(**kwargs) self.partition_column_name = kwargs.get('partition_column_name', None) self.partition_upper_bound = kwargs.get('partition_upper_bound', None) @@ -33897,33 +47102,33 @@ class SqlServerLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. 
Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param user_name: The on-premises Windows authentication user name. Type: string (or Expression + :vartype connection_string: any + :ivar user_name: The on-premises Windows authentication user name. Type: string (or Expression with resultType string). - :type user_name: any - :param password: The on-premises Windows authentication password. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype user_name: any + :ivar password: The on-premises Windows authentication password. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param always_encrypted_settings: Sql always encrypted properties. - :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :vartype encrypted_credential: any + :ivar always_encrypted_settings: Sql always encrypted properties. + :vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -33949,6 +47154,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword user_name: The on-premises Windows authentication user name. Type: string (or + Expression with resultType string). + :paramtype user_name: any + :keyword password: The on-premises Windows authentication password. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword always_encrypted_settings: Sql always encrypted properties. + :paramtype always_encrypted_settings: + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + """ super(SqlServerLinkedService, self).__init__(**kwargs) self.type = 'SqlServer' # type: str self.connection_string = kwargs['connection_string'] @@ -33963,55 +47196,55 @@ class SqlServerSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. 
Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + :vartype disable_metrics_collection: any + :ivar sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). 
- :type sql_writer_stored_procedure_name: any - :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with - resultType string). - :type sql_writer_table_type: any - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + :vartype sql_writer_stored_procedure_name: any + :ivar sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :type pre_copy_script: any - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, + :vartype sql_writer_table_type: any + :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :vartype pre_copy_script: any + :ivar stored_procedure_parameters: SQL stored procedure parameters. + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + :ivar stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :type stored_procedure_table_type_parameter_name: any - :param table_option: The option to handle sink table, such as autoCreate. For now only + :vartype stored_procedure_table_type_parameter_name: any + :ivar table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: any - :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + :vartype table_option: any + :ivar sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :type sql_writer_use_table_lock: any - :param write_behavior: Write behavior when copying data into sql server. 
Type: + :vartype sql_writer_use_table_lock: any + :ivar write_behavior: Write behavior when copying data into sql server. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). - :type write_behavior: any - :param upsert_settings: SQL upsert settings. - :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings + :vartype write_behavior: any + :ivar upsert_settings: SQL upsert settings. + :vartype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ _validation = { @@ -34042,6 +47275,55 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). 
+ :paramtype sql_writer_stored_procedure_name: any + :keyword sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :paramtype sql_writer_table_type: any + :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :paramtype pre_copy_script: any + :keyword stored_procedure_parameters: SQL stored procedure parameters. + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :keyword stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :paramtype stored_procedure_table_type_parameter_name: any + :keyword table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :paramtype table_option: any + :keyword sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean + (or Expression with resultType boolean). + :paramtype sql_writer_use_table_lock: any + :keyword write_behavior: Write behavior when copying data into sql server. Type: + SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :paramtype write_behavior: any + :keyword upsert_settings: SQL upsert settings. + :paramtype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings + """ super(SqlServerSink, self).__init__(**kwargs) self.type = 'SqlServerSink' # type: str self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) @@ -34060,46 +47342,46 @@ class SqlServerSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. 
Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: any - :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database - source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression - with resultType string). - :type sql_reader_stored_procedure_name: any - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + :vartype additional_columns: any + :ivar sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :vartype sql_reader_query: any + :ivar sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. + This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with + resultType string). + :vartype sql_reader_stored_procedure_name: any + :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: any - :param partition_option: The partition mechanism that will be used for Sql read in parallel. + :ivar produce_additional_types: Which additional types to produce. + :vartype produce_additional_types: any + :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". 
- :type partition_option: any - :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. + :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -34127,6 +47409,47 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword sql_reader_query: SQL reader query. Type: string (or Expression with resultType + string). 
+ :paramtype sql_reader_query: any + :keyword sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :paramtype sql_reader_stored_procedure_name: any + :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :keyword produce_additional_types: Which additional types to produce. + :paramtype produce_additional_types: any + :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. + :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + """ super(SqlServerSource, self).__init__(**kwargs) self.type = 'SqlServerSource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) @@ -34142,29 +47465,29 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. 
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param stored_procedure_name: Required. Stored procedure name. Type: string (or Expression with + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar stored_procedure_name: Required. Stored procedure name. Type: string (or Expression with resultType string). - :type stored_procedure_name: any - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + :vartype stored_procedure_name: any + :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
- :type stored_procedure_parameters: dict[str, + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] """ @@ -34191,6 +47514,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword stored_procedure_name: Required. Stored procedure name. Type: string (or Expression + with resultType string). + :paramtype stored_procedure_name: any + :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + """ super(SqlServerStoredProcedureActivity, self).__init__(**kwargs) self.type = 'SqlServerStoredProcedure' # type: str self.stored_procedure_name = kwargs['stored_procedure_name'] @@ -34202,37 +47549,37 @@ class SqlServerTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string + :vartype table_name: any + :ivar schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The table name of the SQL Server dataset. Type: string (or Expression with + :vartype schema_type_properties_schema: any + :ivar table: The table name of the SQL Server dataset. Type: string (or Expression with resultType string). - :type table: any + :vartype table: any """ _validation = { @@ -34259,6 +47606,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string + (or Expression with resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The table name of the SQL Server dataset. Type: string (or Expression with + resultType string). + :paramtype table: any + """ super(SqlServerTableDataset, self).__init__(**kwargs) self.type = 'SqlServerTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -34271,55 +47649,55 @@ class SqlSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + :vartype disable_metrics_collection: any + :ivar sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: any - :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with - resultType string). 
- :type sql_writer_table_type: any - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + :vartype sql_writer_stored_procedure_name: any + :ivar sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType + string). + :vartype sql_writer_table_type: any + :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: any - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, + :vartype pre_copy_script: any + :ivar stored_procedure_parameters: SQL stored procedure parameters. + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + :ivar stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :type stored_procedure_table_type_parameter_name: any - :param table_option: The option to handle sink table, such as autoCreate. For now only + :vartype stored_procedure_table_type_parameter_name: any + :ivar table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: any - :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + :vartype table_option: any + :ivar sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :type sql_writer_use_table_lock: any - :param write_behavior: Write behavior when copying data into sql. Type: SqlWriteBehaviorEnum - (or Expression with resultType SqlWriteBehaviorEnum). - :type write_behavior: any - :param upsert_settings: SQL upsert settings. 
- :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings + :vartype sql_writer_use_table_lock: any + :ivar write_behavior: Write behavior when copying data into sql. Type: SqlWriteBehaviorEnum (or + Expression with resultType SqlWriteBehaviorEnum). + :vartype write_behavior: any + :ivar upsert_settings: SQL upsert settings. + :vartype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ _validation = { @@ -34350,6 +47728,55 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). 
+ :paramtype sql_writer_stored_procedure_name: any + :keyword sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :paramtype sql_writer_table_type: any + :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :paramtype pre_copy_script: any + :keyword stored_procedure_parameters: SQL stored procedure parameters. + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :keyword stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :paramtype stored_procedure_table_type_parameter_name: any + :keyword table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :paramtype table_option: any + :keyword sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean + (or Expression with resultType boolean). + :paramtype sql_writer_use_table_lock: any + :keyword write_behavior: Write behavior when copying data into sql. Type: SqlWriteBehaviorEnum + (or Expression with resultType SqlWriteBehaviorEnum). + :paramtype write_behavior: any + :keyword upsert_settings: SQL upsert settings. + :paramtype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings + """ super(SqlSink, self).__init__(**kwargs) self.type = 'SqlSink' # type: str self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) @@ -34368,48 +47795,48 @@ class SqlSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. 
Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: any - :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database - source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression - with resultType string). - :type sql_reader_stored_procedure_name: any - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + :vartype additional_columns: any + :ivar sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :vartype sql_reader_query: any + :ivar sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. + This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with + resultType string). + :vartype sql_reader_stored_procedure_name: any + :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param isolation_level: Specifies the transaction locking behavior for the SQL source. Allowed + :ivar isolation_level: Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). - :type isolation_level: any - :param partition_option: The partition mechanism that will be used for Sql read in parallel. 
+ :vartype isolation_level: any + :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. + :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -34437,6 +47864,49 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :paramtype additional_columns: any + :keyword sql_reader_query: SQL reader query. Type: string (or Expression with resultType + string). + :paramtype sql_reader_query: any + :keyword sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :paramtype sql_reader_stored_procedure_name: any + :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :keyword isolation_level: Specifies the transaction locking behavior for the SQL source. + Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default + value is ReadCommitted. Type: string (or Expression with resultType string). + :paramtype isolation_level: any + :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. + :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + """ super(SqlSource, self).__init__(**kwargs) self.type = 'SqlSource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) @@ -34450,15 +47920,15 @@ def __init__( class SqlUpsertSettings(msrest.serialization.Model): """Sql upsert option settings. - :param use_temp_db: Specifies whether to use temp db for upsert interim table. Type: boolean - (or Expression with resultType boolean). - :type use_temp_db: any - :param interim_schema_name: Schema name for interim table. 
Type: string (or Expression with + :ivar use_temp_db: Specifies whether to use temp db for upsert interim table. Type: boolean (or + Expression with resultType boolean). + :vartype use_temp_db: any + :ivar interim_schema_name: Schema name for interim table. Type: string (or Expression with resultType string). - :type interim_schema_name: any - :param keys: Key column names for unique row identification. Type: array of strings (or + :vartype interim_schema_name: any + :ivar keys: Key column names for unique row identification. Type: array of strings (or Expression with resultType array of strings). - :type keys: any + :vartype keys: any """ _attribute_map = { @@ -34471,6 +47941,17 @@ def __init__( self, **kwargs ): + """ + :keyword use_temp_db: Specifies whether to use temp db for upsert interim table. Type: boolean + (or Expression with resultType boolean). + :paramtype use_temp_db: any + :keyword interim_schema_name: Schema name for interim table. Type: string (or Expression with + resultType string). + :paramtype interim_schema_name: any + :keyword keys: Key column names for unique row identification. Type: array of strings (or + Expression with resultType array of strings). + :paramtype keys: any + """ super(SqlUpsertSettings, self).__init__(**kwargs) self.use_temp_db = kwargs.get('use_temp_db', None) self.interim_schema_name = kwargs.get('interim_schema_name', None) @@ -34482,45 +47963,45 @@ class SquareLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_properties: Properties used to connect to Square. It is mutually exclusive + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_properties: Properties used to connect to Square. It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: any - :param host: The URL of the Square instance. (i.e. mystore.mysquare.com). - :type host: any - :param client_id: The client ID associated with your Square application. - :type client_id: any - :param client_secret: The client secret associated with your Square application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param redirect_uri: The redirect URL assigned in the Square application dashboard. (i.e. + :vartype connection_properties: any + :ivar host: The URL of the Square instance. (i.e. mystore.mysquare.com). + :vartype host: any + :ivar client_id: The client ID associated with your Square application. 
+ :vartype client_id: any + :ivar client_secret: The client secret associated with your Square application. + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar redirect_uri: The redirect URL assigned in the Square application dashboard. (i.e. http://localhost:2500). - :type redirect_uri: any - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype redirect_uri: any + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -34549,6 +48030,45 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_properties: Properties used to connect to Square. It is mutually exclusive + with any other properties in the linked service. Type: object. + :paramtype connection_properties: any + :keyword host: The URL of the Square instance. (i.e. mystore.mysquare.com). + :paramtype host: any + :keyword client_id: The client ID associated with your Square application. + :paramtype client_id: any + :keyword client_secret: The client secret associated with your Square application. + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword redirect_uri: The redirect URL assigned in the Square application dashboard. (i.e. + http://localhost:2500). + :paramtype redirect_uri: any + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(SquareLinkedService, self).__init__(**kwargs) self.type = 'Square' # type: str self.connection_properties = kwargs.get('connection_properties', None) @@ -34567,30 +48087,30 @@ class SquareObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -34615,6 +48135,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. 
+ :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(SquareObjectDataset, self).__init__(**kwargs) self.type = 'SquareObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -34625,32 +48169,32 @@ class SquareSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -34673,6 +48217,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(SquareSource, self).__init__(**kwargs) self.type = 'SquareSource' # type: str self.query = kwargs.get('query', None) @@ -34683,12 +48253,12 @@ class SSISAccessCredential(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param domain: Required. Domain for windows authentication. - :type domain: any - :param user_name: Required. UseName for windows authentication. - :type user_name: any - :param password: Required. Password for windows authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase + :ivar domain: Required. Domain for windows authentication. + :vartype domain: any + :ivar user_name: Required. UseName for windows authentication. + :vartype user_name: any + :ivar password: Required. Password for windows authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -34707,6 +48277,14 @@ def __init__( self, **kwargs ): + """ + :keyword domain: Required. 
Domain for windows authentication. + :paramtype domain: any + :keyword user_name: Required. UseName for windows authentication. + :paramtype user_name: any + :keyword password: Required. Password for windows authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + """ super(SSISAccessCredential, self).__init__(**kwargs) self.domain = kwargs['domain'] self.user_name = kwargs['user_name'] @@ -34718,16 +48296,16 @@ class SSISChildPackage(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param package_path: Required. Path for embedded child package. Type: string (or Expression - with resultType string). - :type package_path: any - :param package_name: Name for embedded child package. - :type package_name: str - :param package_content: Required. Content for embedded child package. Type: string (or + :ivar package_path: Required. Path for embedded child package. Type: string (or Expression with + resultType string). + :vartype package_path: any + :ivar package_name: Name for embedded child package. + :vartype package_name: str + :ivar package_content: Required. Content for embedded child package. Type: string (or Expression with resultType string). - :type package_content: any - :param package_last_modified_date: Last modified date for embedded child package. - :type package_last_modified_date: str + :vartype package_content: any + :ivar package_last_modified_date: Last modified date for embedded child package. + :vartype package_last_modified_date: str """ _validation = { @@ -34746,6 +48324,18 @@ def __init__( self, **kwargs ): + """ + :keyword package_path: Required. Path for embedded child package. Type: string (or Expression + with resultType string). + :paramtype package_path: any + :keyword package_name: Name for embedded child package. + :paramtype package_name: str + :keyword package_content: Required. Content for embedded child package. 
Type: string (or + Expression with resultType string). + :paramtype package_content: any + :keyword package_last_modified_date: Last modified date for embedded child package. + :paramtype package_last_modified_date: str + """ super(SSISChildPackage, self).__init__(**kwargs) self.package_path = kwargs['package_path'] self.package_name = kwargs.get('package_name', None) @@ -34761,15 +48351,15 @@ class SsisObjectMetadata(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of metadata.Constant filled by server. Possible values include: + :ivar type: Required. Type of metadata.Constant filled by server. Possible values include: "Folder", "Project", "Package", "Environment". - :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str + :vartype type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :ivar id: Metadata id. + :vartype id: long + :ivar name: Metadata name. + :vartype name: str + :ivar description: Metadata description. + :vartype description: str """ _validation = { @@ -34791,6 +48381,14 @@ def __init__( self, **kwargs ): + """ + :keyword id: Metadata id. + :paramtype id: long + :keyword name: Metadata name. + :paramtype name: str + :keyword description: Metadata description. + :paramtype description: str + """ super(SsisObjectMetadata, self).__init__(**kwargs) self.type = None # type: Optional[str] self.id = kwargs.get('id', None) @@ -34803,19 +48401,19 @@ class SsisEnvironment(SsisObjectMetadata): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of metadata.Constant filled by server. Possible values include: + :ivar type: Required. Type of metadata.Constant filled by server. Possible values include: "Folder", "Project", "Package", "Environment". 
- :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param folder_id: Folder id which contains environment. - :type folder_id: long - :param variables: Variable in environment. - :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] + :vartype type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :ivar id: Metadata id. + :vartype id: long + :ivar name: Metadata name. + :vartype name: str + :ivar description: Metadata description. + :vartype description: str + :ivar folder_id: Folder id which contains environment. + :vartype folder_id: long + :ivar variables: Variable in environment. + :vartype variables: list[~azure.mgmt.datafactory.models.SsisVariable] """ _validation = { @@ -34835,6 +48433,18 @@ def __init__( self, **kwargs ): + """ + :keyword id: Metadata id. + :paramtype id: long + :keyword name: Metadata name. + :paramtype name: str + :keyword description: Metadata description. + :paramtype description: str + :keyword folder_id: Folder id which contains environment. + :paramtype folder_id: long + :keyword variables: Variable in environment. + :paramtype variables: list[~azure.mgmt.datafactory.models.SsisVariable] + """ super(SsisEnvironment, self).__init__(**kwargs) self.type = 'Environment' # type: str self.folder_id = kwargs.get('folder_id', None) @@ -34844,14 +48454,14 @@ def __init__( class SsisEnvironmentReference(msrest.serialization.Model): """Ssis environment reference. - :param id: Environment reference id. - :type id: long - :param environment_folder_name: Environment folder name. - :type environment_folder_name: str - :param environment_name: Environment name. - :type environment_name: str - :param reference_type: Reference type. - :type reference_type: str + :ivar id: Environment reference id. 
+ :vartype id: long + :ivar environment_folder_name: Environment folder name. + :vartype environment_folder_name: str + :ivar environment_name: Environment name. + :vartype environment_name: str + :ivar reference_type: Reference type. + :vartype reference_type: str """ _attribute_map = { @@ -34865,6 +48475,16 @@ def __init__( self, **kwargs ): + """ + :keyword id: Environment reference id. + :paramtype id: long + :keyword environment_folder_name: Environment folder name. + :paramtype environment_folder_name: str + :keyword environment_name: Environment name. + :paramtype environment_name: str + :keyword reference_type: Reference type. + :paramtype reference_type: str + """ super(SsisEnvironmentReference, self).__init__(**kwargs) self.id = kwargs.get('id', None) self.environment_folder_name = kwargs.get('environment_folder_name', None) @@ -34877,12 +48497,12 @@ class SSISExecutionCredential(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param domain: Required. Domain for windows authentication. - :type domain: any - :param user_name: Required. UseName for windows authentication. - :type user_name: any - :param password: Required. Password for windows authentication. - :type password: ~azure.mgmt.datafactory.models.SecureString + :ivar domain: Required. Domain for windows authentication. + :vartype domain: any + :ivar user_name: Required. UseName for windows authentication. + :vartype user_name: any + :ivar password: Required. Password for windows authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecureString """ _validation = { @@ -34901,6 +48521,14 @@ def __init__( self, **kwargs ): + """ + :keyword domain: Required. Domain for windows authentication. + :paramtype domain: any + :keyword user_name: Required. UseName for windows authentication. + :paramtype user_name: any + :keyword password: Required. Password for windows authentication. 
+ :paramtype password: ~azure.mgmt.datafactory.models.SecureString + """ super(SSISExecutionCredential, self).__init__(**kwargs) self.domain = kwargs['domain'] self.user_name = kwargs['user_name'] @@ -34912,9 +48540,9 @@ class SSISExecutionParameter(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. SSIS package execution parameter value. Type: string (or Expression - with resultType string). - :type value: any + :ivar value: Required. SSIS package execution parameter value. Type: string (or Expression with + resultType string). + :vartype value: any """ _validation = { @@ -34929,6 +48557,11 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. SSIS package execution parameter value. Type: string (or Expression + with resultType string). + :paramtype value: any + """ super(SSISExecutionParameter, self).__init__(**kwargs) self.value = kwargs['value'] @@ -34938,15 +48571,15 @@ class SsisFolder(SsisObjectMetadata): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of metadata.Constant filled by server. Possible values include: + :ivar type: Required. Type of metadata.Constant filled by server. Possible values include: "Folder", "Project", "Package", "Environment". - :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str + :vartype type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :ivar id: Metadata id. + :vartype id: long + :ivar name: Metadata name. + :vartype name: str + :ivar description: Metadata description. + :vartype description: str """ _validation = { @@ -34964,6 +48597,14 @@ def __init__( self, **kwargs ): + """ + :keyword id: Metadata id. + :paramtype id: long + :keyword name: Metadata name. 
+ :paramtype name: str + :keyword description: Metadata description. + :paramtype description: str + """ super(SsisFolder, self).__init__(**kwargs) self.type = 'Folder' # type: str @@ -34973,17 +48614,17 @@ class SSISLogLocation(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param log_path: Required. The SSIS package execution log path. Type: string (or Expression - with resultType string). - :type log_path: any - :param type: Required. The type of SSIS log location. Possible values include: "File". - :type type: str or ~azure.mgmt.datafactory.models.SsisLogLocationType - :param access_credential: The package execution log access credential. - :type access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential - :param log_refresh_interval: Specifies the interval to refresh log. The default interval is 5 + :ivar log_path: Required. The SSIS package execution log path. Type: string (or Expression with + resultType string). + :vartype log_path: any + :ivar type: Required. The type of SSIS log location. Possible values include: "File". + :vartype type: str or ~azure.mgmt.datafactory.models.SsisLogLocationType + :ivar access_credential: The package execution log access credential. + :vartype access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential + :ivar log_refresh_interval: Specifies the interval to refresh log. The default interval is 5 minutes. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type log_refresh_interval: any + :vartype log_refresh_interval: any """ _validation = { @@ -35002,6 +48643,19 @@ def __init__( self, **kwargs ): + """ + :keyword log_path: Required. The SSIS package execution log path. Type: string (or Expression + with resultType string). + :paramtype log_path: any + :keyword type: Required. The type of SSIS log location. Possible values include: "File". 
+ :paramtype type: str or ~azure.mgmt.datafactory.models.SsisLogLocationType + :keyword access_credential: The package execution log access credential. + :paramtype access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential + :keyword log_refresh_interval: Specifies the interval to refresh log. The default interval is 5 + minutes. Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype log_refresh_interval: any + """ super(SSISLogLocation, self).__init__(**kwargs) self.log_path = kwargs['log_path'] self.type = kwargs['type'] @@ -35012,10 +48666,10 @@ def __init__( class SsisObjectMetadataListResponse(msrest.serialization.Model): """A list of SSIS object metadata. - :param value: List of SSIS object metadata. - :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: List of SSIS object metadata. + :vartype value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _attribute_map = { @@ -35027,6 +48681,12 @@ def __init__( self, **kwargs ): + """ + :keyword value: List of SSIS object metadata. + :paramtype value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(SsisObjectMetadataListResponse, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = kwargs.get('next_link', None) @@ -35035,14 +48695,14 @@ def __init__( class SsisObjectMetadataStatusResponse(msrest.serialization.Model): """The status of the operation. - :param status: The status of the operation. - :type status: str - :param name: The operation name. 
- :type name: str - :param properties: The operation properties. - :type properties: str - :param error: The operation error message. - :type error: str + :ivar status: The status of the operation. + :vartype status: str + :ivar name: The operation name. + :vartype name: str + :ivar properties: The operation properties. + :vartype properties: str + :ivar error: The operation error message. + :vartype error: str """ _attribute_map = { @@ -35056,6 +48716,16 @@ def __init__( self, **kwargs ): + """ + :keyword status: The status of the operation. + :paramtype status: str + :keyword name: The operation name. + :paramtype name: str + :keyword properties: The operation properties. + :paramtype properties: str + :keyword error: The operation error message. + :paramtype error: str + """ super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) self.status = kwargs.get('status', None) self.name = kwargs.get('name', None) @@ -35068,23 +48738,23 @@ class SsisPackage(SsisObjectMetadata): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of metadata.Constant filled by server. Possible values include: + :ivar type: Required. Type of metadata.Constant filled by server. Possible values include: "Folder", "Project", "Package", "Environment". - :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param folder_id: Folder id which contains package. - :type folder_id: long - :param project_version: Project version which contains package. - :type project_version: long - :param project_id: Project id which contains package. - :type project_id: long - :param parameters: Parameters in package. 
- :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + :vartype type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :ivar id: Metadata id. + :vartype id: long + :ivar name: Metadata name. + :vartype name: str + :ivar description: Metadata description. + :vartype description: str + :ivar folder_id: Folder id which contains package. + :vartype folder_id: long + :ivar project_version: Project version which contains package. + :vartype project_version: long + :ivar project_id: Project id which contains package. + :vartype project_id: long + :ivar parameters: Parameters in package. + :vartype parameters: list[~azure.mgmt.datafactory.models.SsisParameter] """ _validation = { @@ -35106,6 +48776,22 @@ def __init__( self, **kwargs ): + """ + :keyword id: Metadata id. + :paramtype id: long + :keyword name: Metadata name. + :paramtype name: str + :keyword description: Metadata description. + :paramtype description: str + :keyword folder_id: Folder id which contains package. + :paramtype folder_id: long + :keyword project_version: Project version which contains package. + :paramtype project_version: long + :keyword project_id: Project id which contains package. + :paramtype project_id: long + :keyword parameters: Parameters in package. + :paramtype parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ super(SsisPackage, self).__init__(**kwargs) self.type = 'Package' # type: str self.folder_id = kwargs.get('folder_id', None) @@ -35117,30 +48803,29 @@ def __init__( class SSISPackageLocation(msrest.serialization.Model): """SSIS package location. - :param package_path: The SSIS package path. Type: string (or Expression with resultType - string). - :type package_path: any - :param type: The type of SSIS package location. Possible values include: "SSISDB", "File", + :ivar package_path: The SSIS package path. Type: string (or Expression with resultType string). 
+ :vartype package_path: any + :ivar type: The type of SSIS package location. Possible values include: "SSISDB", "File", "InlinePackage", "PackageStore". - :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType - :param package_password: Password of the package. - :type package_password: ~azure.mgmt.datafactory.models.SecretBase - :param access_credential: The package access credential. - :type access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential - :param configuration_path: The configuration file of the package execution. Type: string (or + :vartype type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType + :ivar package_password: Password of the package. + :vartype package_password: ~azure.mgmt.datafactory.models.SecretBase + :ivar access_credential: The package access credential. + :vartype access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential + :ivar configuration_path: The configuration file of the package execution. Type: string (or Expression with resultType string). - :type configuration_path: any - :param configuration_access_credential: The configuration file access credential. - :type configuration_access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential - :param package_name: The package name. - :type package_name: str - :param package_content: The embedded package content. Type: string (or Expression with + :vartype configuration_path: any + :ivar configuration_access_credential: The configuration file access credential. + :vartype configuration_access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential + :ivar package_name: The package name. + :vartype package_name: str + :ivar package_content: The embedded package content. Type: string (or Expression with resultType string). - :type package_content: any - :param package_last_modified_date: The embedded package last modified date. 
- :type package_last_modified_date: str - :param child_packages: The embedded child package list. - :type child_packages: list[~azure.mgmt.datafactory.models.SSISChildPackage] + :vartype package_content: any + :ivar package_last_modified_date: The embedded package last modified date. + :vartype package_last_modified_date: str + :ivar child_packages: The embedded child package list. + :vartype child_packages: list[~azure.mgmt.datafactory.models.SSISChildPackage] """ _attribute_map = { @@ -35160,6 +48845,32 @@ def __init__( self, **kwargs ): + """ + :keyword package_path: The SSIS package path. Type: string (or Expression with resultType + string). + :paramtype package_path: any + :keyword type: The type of SSIS package location. Possible values include: "SSISDB", "File", + "InlinePackage", "PackageStore". + :paramtype type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType + :keyword package_password: Password of the package. + :paramtype package_password: ~azure.mgmt.datafactory.models.SecretBase + :keyword access_credential: The package access credential. + :paramtype access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential + :keyword configuration_path: The configuration file of the package execution. Type: string (or + Expression with resultType string). + :paramtype configuration_path: any + :keyword configuration_access_credential: The configuration file access credential. + :paramtype configuration_access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential + :keyword package_name: The package name. + :paramtype package_name: str + :keyword package_content: The embedded package content. Type: string (or Expression with + resultType string). + :paramtype package_content: any + :keyword package_last_modified_date: The embedded package last modified date. + :paramtype package_last_modified_date: str + :keyword child_packages: The embedded child package list. 
+ :paramtype child_packages: list[~azure.mgmt.datafactory.models.SSISChildPackage] + """ super(SSISPackageLocation, self).__init__(**kwargs) self.package_path = kwargs.get('package_path', None) self.type = kwargs.get('type', None) @@ -35176,30 +48887,30 @@ def __init__( class SsisParameter(msrest.serialization.Model): """Ssis parameter. - :param id: Parameter id. - :type id: long - :param name: Parameter name. - :type name: str - :param description: Parameter description. - :type description: str - :param data_type: Parameter type. - :type data_type: str - :param required: Whether parameter is required. - :type required: bool - :param sensitive: Whether parameter is sensitive. - :type sensitive: bool - :param design_default_value: Design default value of parameter. - :type design_default_value: str - :param default_value: Default value of parameter. - :type default_value: str - :param sensitive_default_value: Default sensitive value of parameter. - :type sensitive_default_value: str - :param value_type: Parameter value type. - :type value_type: str - :param value_set: Parameter value set. - :type value_set: bool - :param variable: Parameter reference variable. - :type variable: str + :ivar id: Parameter id. + :vartype id: long + :ivar name: Parameter name. + :vartype name: str + :ivar description: Parameter description. + :vartype description: str + :ivar data_type: Parameter type. + :vartype data_type: str + :ivar required: Whether parameter is required. + :vartype required: bool + :ivar sensitive: Whether parameter is sensitive. + :vartype sensitive: bool + :ivar design_default_value: Design default value of parameter. + :vartype design_default_value: str + :ivar default_value: Default value of parameter. + :vartype default_value: str + :ivar sensitive_default_value: Default sensitive value of parameter. + :vartype sensitive_default_value: str + :ivar value_type: Parameter value type. + :vartype value_type: str + :ivar value_set: Parameter value set. 
+ :vartype value_set: bool + :ivar variable: Parameter reference variable. + :vartype variable: str """ _attribute_map = { @@ -35221,6 +48932,32 @@ def __init__( self, **kwargs ): + """ + :keyword id: Parameter id. + :paramtype id: long + :keyword name: Parameter name. + :paramtype name: str + :keyword description: Parameter description. + :paramtype description: str + :keyword data_type: Parameter type. + :paramtype data_type: str + :keyword required: Whether parameter is required. + :paramtype required: bool + :keyword sensitive: Whether parameter is sensitive. + :paramtype sensitive: bool + :keyword design_default_value: Design default value of parameter. + :paramtype design_default_value: str + :keyword default_value: Default value of parameter. + :paramtype default_value: str + :keyword sensitive_default_value: Default sensitive value of parameter. + :paramtype sensitive_default_value: str + :keyword value_type: Parameter value type. + :paramtype value_type: str + :keyword value_set: Parameter value set. + :paramtype value_set: bool + :keyword variable: Parameter reference variable. + :paramtype variable: str + """ super(SsisParameter, self).__init__(**kwargs) self.id = kwargs.get('id', None) self.name = kwargs.get('name', None) @@ -35241,23 +48978,23 @@ class SsisProject(SsisObjectMetadata): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of metadata.Constant filled by server. Possible values include: + :ivar type: Required. Type of metadata.Constant filled by server. Possible values include: "Folder", "Project", "Package", "Environment". - :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param folder_id: Folder id which contains project. - :type folder_id: long - :param version: Project version. 
- :type version: long - :param environment_refs: Environment reference in project. - :type environment_refs: list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] - :param parameters: Parameters in project. - :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + :vartype type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :ivar id: Metadata id. + :vartype id: long + :ivar name: Metadata name. + :vartype name: str + :ivar description: Metadata description. + :vartype description: str + :ivar folder_id: Folder id which contains project. + :vartype folder_id: long + :ivar version: Project version. + :vartype version: long + :ivar environment_refs: Environment reference in project. + :vartype environment_refs: list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :ivar parameters: Parameters in project. + :vartype parameters: list[~azure.mgmt.datafactory.models.SsisParameter] """ _validation = { @@ -35279,6 +49016,22 @@ def __init__( self, **kwargs ): + """ + :keyword id: Metadata id. + :paramtype id: long + :keyword name: Metadata name. + :paramtype name: str + :keyword description: Metadata description. + :paramtype description: str + :keyword folder_id: Folder id which contains project. + :paramtype folder_id: long + :keyword version: Project version. + :paramtype version: long + :keyword environment_refs: Environment reference in project. + :paramtype environment_refs: list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :keyword parameters: Parameters in project. + :paramtype parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ super(SsisProject, self).__init__(**kwargs) self.type = 'Project' # type: str self.folder_id = kwargs.get('folder_id', None) @@ -35292,12 +49045,12 @@ class SSISPropertyOverride(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. SSIS package property override value. 
Type: string (or Expression with + :ivar value: Required. SSIS package property override value. Type: string (or Expression with resultType string). - :type value: any - :param is_sensitive: Whether SSIS package property override value is sensitive data. Value will + :vartype value: any + :ivar is_sensitive: Whether SSIS package property override value is sensitive data. Value will be encrypted in SSISDB if it is true. - :type is_sensitive: bool + :vartype is_sensitive: bool """ _validation = { @@ -35313,6 +49066,14 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. SSIS package property override value. Type: string (or Expression + with resultType string). + :paramtype value: any + :keyword is_sensitive: Whether SSIS package property override value is sensitive data. Value + will be encrypted in SSISDB if it is true. + :paramtype is_sensitive: bool + """ super(SSISPropertyOverride, self).__init__(**kwargs) self.value = kwargs['value'] self.is_sensitive = kwargs.get('is_sensitive', None) @@ -35321,20 +49082,20 @@ def __init__( class SsisVariable(msrest.serialization.Model): """Ssis variable. - :param id: Variable id. - :type id: long - :param name: Variable name. - :type name: str - :param description: Variable description. - :type description: str - :param data_type: Variable type. - :type data_type: str - :param sensitive: Whether variable is sensitive. - :type sensitive: bool - :param value: Variable value. - :type value: str - :param sensitive_value: Variable sensitive value. - :type sensitive_value: str + :ivar id: Variable id. + :vartype id: long + :ivar name: Variable name. + :vartype name: str + :ivar description: Variable description. + :vartype description: str + :ivar data_type: Variable type. + :vartype data_type: str + :ivar sensitive: Whether variable is sensitive. + :vartype sensitive: bool + :ivar value: Variable value. + :vartype value: str + :ivar sensitive_value: Variable sensitive value. 
+ :vartype sensitive_value: str """ _attribute_map = { @@ -35351,6 +49112,22 @@ def __init__( self, **kwargs ): + """ + :keyword id: Variable id. + :paramtype id: long + :keyword name: Variable name. + :paramtype name: str + :keyword description: Variable description. + :paramtype description: str + :keyword data_type: Variable type. + :paramtype data_type: str + :keyword sensitive: Whether variable is sensitive. + :paramtype sensitive: bool + :keyword value: Variable value. + :paramtype value: str + :keyword sensitive_value: Variable sensitive value. + :paramtype sensitive_value: str + """ super(SsisVariable, self).__init__(**kwargs) self.id = kwargs.get('id', None) self.name = kwargs.get('name', None) @@ -35366,17 +49143,17 @@ class StagingSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param linked_service_name: Required. Staging linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing the interim data. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar linked_service_name: Required. Staging linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar path: The path to storage for storing the interim data. Type: string (or Expression with resultType string). - :type path: any - :param enable_compression: Specifies whether to use compression when copying data via an - interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). 
- :type enable_compression: any + :vartype path: any + :ivar enable_compression: Specifies whether to use compression when copying data via an interim + staging. Default value is false. Type: boolean (or Expression with resultType boolean). + :vartype enable_compression: any """ _validation = { @@ -35394,6 +49171,19 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword linked_service_name: Required. Staging linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword path: The path to storage for storing the interim data. Type: string (or Expression + with resultType string). + :paramtype path: any + :keyword enable_compression: Specifies whether to use compression when copying data via an + interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). + :paramtype enable_compression: any + """ super(StagingSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.linked_service_name = kwargs['linked_service_name'] @@ -35404,12 +49194,12 @@ def __init__( class StoredProcedureParameter(msrest.serialization.Model): """SQL stored procedure parameter. - :param value: Stored procedure parameter value. Type: string (or Expression with resultType + :ivar value: Stored procedure parameter value. Type: string (or Expression with resultType string). - :type value: any - :param type: Stored procedure parameter type. Possible values include: "String", "Int", - "Int64", "Decimal", "Guid", "Boolean", "Date". - :type type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType + :vartype value: any + :ivar type: Stored procedure parameter type. Possible values include: "String", "Int", "Int64", + "Decimal", "Guid", "Boolean", "Date". 
+ :vartype type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType """ _attribute_map = { @@ -35421,6 +49211,14 @@ def __init__( self, **kwargs ): + """ + :keyword value: Stored procedure parameter value. Type: string (or Expression with resultType + string). + :paramtype value: any + :keyword type: Stored procedure parameter type. Possible values include: "String", "Int", + "Int64", "Decimal", "Guid", "Boolean", "Date". + :paramtype type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType + """ super(StoredProcedureParameter, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.type = kwargs.get('type', None) @@ -35431,29 +49229,29 @@ class SwitchActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param on: Required. An expression that would evaluate to a string or integer. This is used to + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. 
+ :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar on: Required. An expression that would evaluate to a string or integer. This is used to determine the block of activities in cases that will be executed. - :type on: ~azure.mgmt.datafactory.models.Expression - :param cases: List of cases that correspond to expected values of the 'on' property. This is an + :vartype on: ~azure.mgmt.datafactory.models.Expression + :ivar cases: List of cases that correspond to expected values of the 'on' property. This is an optional property and if not provided, the activity will execute activities provided in defaultActivities. - :type cases: list[~azure.mgmt.datafactory.models.SwitchCase] - :param default_activities: List of activities to execute if no case condition is satisfied. - This is an optional property and if not provided, the activity will exit without any action. - :type default_activities: list[~azure.mgmt.datafactory.models.Activity] + :vartype cases: list[~azure.mgmt.datafactory.models.SwitchCase] + :ivar default_activities: List of activities to execute if no case condition is satisfied. This + is an optional property and if not provided, the activity will exit without any action. + :vartype default_activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { @@ -35478,6 +49276,29 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. 
+ :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword on: Required. An expression that would evaluate to a string or integer. This is used + to determine the block of activities in cases that will be executed. + :paramtype on: ~azure.mgmt.datafactory.models.Expression + :keyword cases: List of cases that correspond to expected values of the 'on' property. This is + an optional property and if not provided, the activity will execute activities provided in + defaultActivities. + :paramtype cases: list[~azure.mgmt.datafactory.models.SwitchCase] + :keyword default_activities: List of activities to execute if no case condition is satisfied. + This is an optional property and if not provided, the activity will exit without any action. + :paramtype default_activities: list[~azure.mgmt.datafactory.models.Activity] + """ super(SwitchActivity, self).__init__(**kwargs) self.type = 'Switch' # type: str self.on = kwargs['on'] @@ -35488,10 +49309,10 @@ def __init__( class SwitchCase(msrest.serialization.Model): """Switch cases with have a value and corresponding activities. - :param value: Expected value that satisfies the expression result of the 'on' property. - :type value: str - :param activities: List of activities to execute for satisfied case condition. - :type activities: list[~azure.mgmt.datafactory.models.Activity] + :ivar value: Expected value that satisfies the expression result of the 'on' property. + :vartype value: str + :ivar activities: List of activities to execute for satisfied case condition. + :vartype activities: list[~azure.mgmt.datafactory.models.Activity] """ _attribute_map = { @@ -35503,6 +49324,12 @@ def __init__( self, **kwargs ): + """ + :keyword value: Expected value that satisfies the expression result of the 'on' property. 
+ :paramtype value: str + :keyword activities: List of activities to execute for satisfied case condition. + :paramtype activities: list[~azure.mgmt.datafactory.models.Activity] + """ super(SwitchCase, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.activities = kwargs.get('activities', None) @@ -35513,39 +49340,39 @@ class SybaseLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param server: Required. Server name for connection. Type: string (or Expression with - resultType string). - :type server: any - :param database: Required. Database name for connection. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar server: Required. Server name for connection. Type: string (or Expression with resultType + string). + :vartype server: any + :ivar database: Required. Database name for connection. Type: string (or Expression with resultType string). - :type database: any - :param schema: Schema name for connection. Type: string (or Expression with resultType string). - :type schema: any - :param authentication_type: AuthenticationType to be used for connection. Possible values + :vartype database: any + :ivar schema: Schema name for connection. Type: string (or Expression with resultType string). + :vartype schema: any + :ivar authentication_type: AuthenticationType to be used for connection. Possible values include: "Basic", "Windows". - :type authentication_type: str or ~azure.mgmt.datafactory.models.SybaseAuthenticationType - :param username: Username for authentication. Type: string (or Expression with resultType + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.SybaseAuthenticationType + :ivar username: Username for authentication. Type: string (or Expression with resultType string). - :type username: any - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype username: any + :ivar password: Password for authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -35574,6 +49401,40 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword server: Required. Server name for connection. Type: string (or Expression with + resultType string). + :paramtype server: any + :keyword database: Required. Database name for connection. Type: string (or Expression with + resultType string). + :paramtype database: any + :keyword schema: Schema name for connection. Type: string (or Expression with resultType + string). + :paramtype schema: any + :keyword authentication_type: AuthenticationType to be used for connection. Possible values + include: "Basic", "Windows". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.SybaseAuthenticationType + :keyword username: Username for authentication. Type: string (or Expression with resultType + string). + :paramtype username: any + :keyword password: Password for authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(SybaseLinkedService, self).__init__(**kwargs) self.type = 'Sybase' # type: str self.server = kwargs['server'] @@ -35590,31 +49451,31 @@ class SybaseSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any + :vartype additional_columns: any + :ivar query: Database query. Type: string (or Expression with resultType string). + :vartype query: any """ _validation = { @@ -35637,6 +49498,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: any + """ super(SybaseSource, self).__init__(**kwargs) self.type = 'SybaseSource' # type: str self.query = kwargs.get('query', None) @@ -35647,30 +49533,30 @@ class SybaseTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The Sybase table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The Sybase table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -35695,6 +49581,31 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. 
+ :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The Sybase table name. Type: string (or Expression with resultType + string). + :paramtype table_name: any + """ super(SybaseTableDataset, self).__init__(**kwargs) self.type = 'SybaseTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -35705,37 +49616,37 @@ class TabularTranslator(CopyTranslator): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy translator type.Constant filled by server. - :type type: str - :param column_mappings: Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy translator type.Constant filled by server. + :vartype type: str + :ivar column_mappings: Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: MyName" Type: string (or Expression with resultType string). This property will be retired. Please use mappings property. - :type column_mappings: any - :param schema_mapping: The schema mapping to map between tabular data and hierarchical data. 
+ :vartype column_mappings: any + :ivar schema_mapping: The schema mapping to map between tabular data and hierarchical data. Example: {"Column1": "$.Column1", "Column2": "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or Expression with resultType object). This property will be retired. Please use mappings property. - :type schema_mapping: any - :param collection_reference: The JSON Path of the Nested Array that is going to do cross-apply. + :vartype schema_mapping: any + :ivar collection_reference: The JSON Path of the Nested Array that is going to do cross-apply. Type: object (or Expression with resultType object). - :type collection_reference: any - :param map_complex_values_to_string: Whether to map complex (array and object) values to simple + :vartype collection_reference: any + :ivar map_complex_values_to_string: Whether to map complex (array and object) values to simple strings in json format. Type: boolean (or Expression with resultType boolean). - :type map_complex_values_to_string: any - :param mappings: Column mappings with logical types. Tabular->tabular example: + :vartype map_complex_values_to_string: any + :ivar mappings: Column mappings with logical types. Tabular->tabular example: [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Hierarchical->tabular example: [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Type: object (or Expression with resultType object). - :type mappings: any - :param type_conversion: Whether to enable the advanced type conversion feature in the Copy + :vartype mappings: any + :ivar type_conversion: Whether to enable the advanced type conversion feature in the Copy activity. 
Type: boolean (or Expression with resultType boolean). - :type type_conversion: any - :param type_conversion_settings: Type conversion settings. - :type type_conversion_settings: ~azure.mgmt.datafactory.models.TypeConversionSettings + :vartype type_conversion: any + :ivar type_conversion_settings: Type conversion settings. + :vartype type_conversion_settings: ~azure.mgmt.datafactory.models.TypeConversionSettings """ _validation = { @@ -35758,6 +49669,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword column_mappings: Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: + MyName" Type: string (or Expression with resultType string). This property will be retired. + Please use mappings property. + :paramtype column_mappings: any + :keyword schema_mapping: The schema mapping to map between tabular data and hierarchical data. + Example: {"Column1": "$.Column1", "Column2": "$.Column2.Property1", "Column3": + "$.Column2.Property2"}. Type: object (or Expression with resultType object). This property will + be retired. Please use mappings property. + :paramtype schema_mapping: any + :keyword collection_reference: The JSON Path of the Nested Array that is going to do + cross-apply. Type: object (or Expression with resultType object). + :paramtype collection_reference: any + :keyword map_complex_values_to_string: Whether to map complex (array and object) values to + simple strings in json format. Type: boolean (or Expression with resultType boolean). + :paramtype map_complex_values_to_string: any + :keyword mappings: Column mappings with logical types. Tabular->tabular example: + [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. 
+ Hierarchical->tabular example: + [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + Type: object (or Expression with resultType object). + :paramtype mappings: any + :keyword type_conversion: Whether to enable the advanced type conversion feature in the Copy + activity. Type: boolean (or Expression with resultType boolean). + :paramtype type_conversion: any + :keyword type_conversion_settings: Type conversion settings. + :paramtype type_conversion_settings: ~azure.mgmt.datafactory.models.TypeConversionSettings + """ super(TabularTranslator, self).__init__(**kwargs) self.type = 'TabularTranslator' # type: str self.column_mappings = kwargs.get('column_mappings', None) @@ -35774,14 +49716,14 @@ class TarGZipReadSettings(CompressionReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The Compression setting type.Constant filled by server. - :type type: str - :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + :vartype additional_properties: dict[str, any] + :ivar type: Required. The Compression setting type.Constant filled by server. + :vartype type: str + :ivar preserve_compression_file_name_as_folder: Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). 
- :type preserve_compression_file_name_as_folder: any + :vartype preserve_compression_file_name_as_folder: any """ _validation = { @@ -35798,6 +49740,14 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). + :paramtype preserve_compression_file_name_as_folder: any + """ super(TarGZipReadSettings, self).__init__(**kwargs) self.type = 'TarGZipReadSettings' # type: str self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) @@ -35808,14 +49758,14 @@ class TarReadSettings(CompressionReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The Compression setting type.Constant filled by server. - :type type: str - :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + :vartype additional_properties: dict[str, any] + :ivar type: Required. The Compression setting type.Constant filled by server. + :vartype type: str + :ivar preserve_compression_file_name_as_folder: Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). - :type preserve_compression_file_name_as_folder: any + :vartype preserve_compression_file_name_as_folder: any """ _validation = { @@ -35832,6 +49782,14 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). + :paramtype preserve_compression_file_name_as_folder: any + """ super(TarReadSettings, self).__init__(**kwargs) self.type = 'TarReadSettings' # type: str self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) @@ -35842,36 +49800,36 @@ class TeradataLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Teradata ODBC connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Teradata ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param server: Server name for connection. Type: string (or Expression with resultType string). - :type server: any - :param authentication_type: AuthenticationType to be used for connection. Possible values + :vartype connection_string: any + :ivar server: Server name for connection. Type: string (or Expression with resultType string). + :vartype server: any + :ivar authentication_type: AuthenticationType to be used for connection. Possible values include: "Basic", "Windows". - :type authentication_type: str or ~azure.mgmt.datafactory.models.TeradataAuthenticationType - :param username: Username for authentication. Type: string (or Expression with resultType + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.TeradataAuthenticationType + :ivar username: Username for authentication. Type: string (or Expression with resultType string). - :type username: any - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype username: any + :ivar password: Password for authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -35897,6 +49855,38 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Teradata ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword server: Server name for connection. Type: string (or Expression with resultType + string). + :paramtype server: any + :keyword authentication_type: AuthenticationType to be used for connection. Possible values + include: "Basic", "Windows". + :paramtype authentication_type: str or + ~azure.mgmt.datafactory.models.TeradataAuthenticationType + :keyword username: Username for authentication. Type: string (or Expression with resultType + string). + :paramtype username: any + :keyword password: Password for authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(TeradataLinkedService, self).__init__(**kwargs) self.type = 'Teradata' # type: str self.connection_string = kwargs.get('connection_string', None) @@ -35910,17 +49900,17 @@ def __init__( class TeradataPartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for teradata source partitioning. - :param partition_column_name: The name of the column that will be used for proceeding range or + :ivar partition_column_name: The name of the column that will be used for proceeding range or hash partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: any - :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + :vartype partition_column_name: any + :ivar partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_upper_bound: any - :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + :vartype partition_upper_bound: any + :ivar partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_lower_bound: any + :vartype partition_lower_bound: any """ _attribute_map = { @@ -35933,6 +49923,19 @@ def __init__( self, **kwargs ): + """ + :keyword partition_column_name: The name of the column that will be used for proceeding range + or hash partitioning. Type: string (or Expression with resultType string). + :paramtype partition_column_name: any + :keyword partition_upper_bound: The maximum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). 
+ :paramtype partition_upper_bound: any + :keyword partition_lower_bound: The minimum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). + :paramtype partition_lower_bound: any + """ super(TeradataPartitionSettings, self).__init__(**kwargs) self.partition_column_name = kwargs.get('partition_column_name', None) self.partition_upper_bound = kwargs.get('partition_upper_bound', None) @@ -35944,37 +49947,36 @@ class TeradataSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Teradata query. Type: string (or Expression with resultType string). - :type query: any - :param partition_option: The partition mechanism that will be used for teradata read in + :vartype additional_columns: any + :ivar query: Teradata query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar partition_option: The partition mechanism that will be used for teradata read in parallel. Possible values include: "None", "Hash", "DynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for teradata source - partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.TeradataPartitionSettings + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for teradata source partitioning. 
+ :vartype partition_settings: ~azure.mgmt.datafactory.models.TeradataPartitionSettings """ _validation = { @@ -35999,6 +50001,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Teradata query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword partition_option: The partition mechanism that will be used for teradata read in + parallel. Possible values include: "None", "Hash", "DynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for teradata source + partitioning. 
+ :paramtype partition_settings: ~azure.mgmt.datafactory.models.TeradataPartitionSettings + """ super(TeradataSource, self).__init__(**kwargs) self.type = 'TeradataSource' # type: str self.query = kwargs.get('query', None) @@ -36011,33 +50044,33 @@ class TeradataTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param database: The database name of Teradata. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar database: The database name of Teradata. Type: string (or Expression with resultType string). - :type database: any - :param table: The table name of Teradata. Type: string (or Expression with resultType string). - :type table: any + :vartype database: any + :ivar table: The table name of Teradata. Type: string (or Expression with resultType string). + :vartype table: any """ _validation = { @@ -36063,6 +50096,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword database: The database name of Teradata. Type: string (or Expression with resultType + string). + :paramtype database: any + :keyword table: The table name of Teradata. Type: string (or Expression with resultType + string). + :paramtype table: any + """ super(TeradataTableDataset, self).__init__(**kwargs) self.type = 'TeradataTable' # type: str self.database = kwargs.get('database', None) @@ -36074,42 +50135,42 @@ class TextFormat(DatasetStorageFormat): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage format.Constant filled by server. - :type type: str - :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: any - :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: any - :param column_delimiter: The column delimiter. Type: string (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage format.Constant filled by server. + :vartype type: str + :ivar serializer: Serializer. Type: string (or Expression with resultType string). 
+ :vartype serializer: any + :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). + :vartype deserializer: any + :ivar column_delimiter: The column delimiter. Type: string (or Expression with resultType string). - :type column_delimiter: any - :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string). - :type row_delimiter: any - :param escape_char: The escape character. Type: string (or Expression with resultType string). - :type escape_char: any - :param quote_char: The quote character. Type: string (or Expression with resultType string). - :type quote_char: any - :param null_value: The null value string. Type: string (or Expression with resultType string). - :type null_value: any - :param encoding_name: The code page name of the preferred encoding. If miss, the default value + :vartype column_delimiter: any + :ivar row_delimiter: The row delimiter. Type: string (or Expression with resultType string). + :vartype row_delimiter: any + :ivar escape_char: The escape character. Type: string (or Expression with resultType string). + :vartype escape_char: any + :ivar quote_char: The quote character. Type: string (or Expression with resultType string). + :vartype quote_char: any + :ivar null_value: The null value string. Type: string (or Expression with resultType string). + :vartype null_value: any + :ivar encoding_name: The code page name of the preferred encoding. If miss, the default value is “utf-8”, unless BOM denotes another Unicode encoding. Refer to the “Name” column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :type encoding_name: any - :param treat_empty_as_null: Treat empty column values in the text file as null. The default + :vartype encoding_name: any + :ivar treat_empty_as_null: Treat empty column values in the text file as null. 
The default value is true. Type: boolean (or Expression with resultType boolean). - :type treat_empty_as_null: any - :param skip_line_count: The number of lines/rows to be skipped when parsing text files. The + :vartype treat_empty_as_null: any + :ivar skip_line_count: The number of lines/rows to be skipped when parsing text files. The default value is 0. Type: integer (or Expression with resultType integer). - :type skip_line_count: any - :param first_row_as_header: When used as input, treat the first row of data as headers. When + :vartype skip_line_count: any + :ivar first_row_as_header: When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). - :type first_row_as_header: any + :vartype first_row_as_header: any """ _validation = { @@ -36136,6 +50197,44 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword serializer: Serializer. Type: string (or Expression with resultType string). + :paramtype serializer: any + :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). + :paramtype deserializer: any + :keyword column_delimiter: The column delimiter. Type: string (or Expression with resultType + string). + :paramtype column_delimiter: any + :keyword row_delimiter: The row delimiter. Type: string (or Expression with resultType string). + :paramtype row_delimiter: any + :keyword escape_char: The escape character. Type: string (or Expression with resultType + string). + :paramtype escape_char: any + :keyword quote_char: The quote character. Type: string (or Expression with resultType string). + :paramtype quote_char: any + :keyword null_value: The null value string. 
Type: string (or Expression with resultType + string). + :paramtype null_value: any + :keyword encoding_name: The code page name of the preferred encoding. If miss, the default + value is “utf-8”, unless BOM denotes another Unicode encoding. Refer to the “Name” + column of the table in the following link to set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). + :paramtype encoding_name: any + :keyword treat_empty_as_null: Treat empty column values in the text file as null. The default + value is true. Type: boolean (or Expression with resultType boolean). + :paramtype treat_empty_as_null: any + :keyword skip_line_count: The number of lines/rows to be skipped when parsing text files. The + default value is 0. Type: integer (or Expression with resultType integer). + :paramtype skip_line_count: any + :keyword first_row_as_header: When used as input, treat the first row of data as headers. When + used as output,write the headers into the output as the first row of data. The default value is + false. Type: boolean (or Expression with resultType boolean). + :paramtype first_row_as_header: any + """ super(TextFormat, self).__init__(**kwargs) self.type = 'TextFormat' # type: str self.column_delimiter = kwargs.get('column_delimiter', None) @@ -36157,10 +50256,10 @@ class TriggerDependencyReference(DependencyReference): All required parameters must be populated in order to send to Azure. - :param type: Required. The type of dependency reference.Constant filled by server. - :type type: str - :param reference_trigger: Required. Referenced trigger. - :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + :ivar type: Required. The type of dependency reference.Constant filled by server. + :vartype type: str + :ivar reference_trigger: Required. Referenced trigger. 
+ :vartype reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference """ _validation = { @@ -36181,6 +50280,10 @@ def __init__( self, **kwargs ): + """ + :keyword reference_trigger: Required. Referenced trigger. + :paramtype reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + """ super(TriggerDependencyReference, self).__init__(**kwargs) self.type = 'TriggerDependencyReference' # type: str self.reference_trigger = kwargs['reference_trigger'] @@ -36189,12 +50292,12 @@ def __init__( class TriggerFilterParameters(msrest.serialization.Model): """Query parameters for triggers. - :param continuation_token: The continuation token for getting the next page of results. Null - for first page. - :type continuation_token: str - :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun + :ivar continuation_token: The continuation token for getting the next page of results. Null for + first page. + :vartype continuation_token: str + :ivar parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun triggers. - :type parent_trigger_name: str + :vartype parent_trigger_name: str """ _attribute_map = { @@ -36206,6 +50309,14 @@ def __init__( self, **kwargs ): + """ + :keyword continuation_token: The continuation token for getting the next page of results. Null + for first page. + :paramtype continuation_token: str + :keyword parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child + rerun triggers. + :paramtype parent_trigger_name: str + """ super(TriggerFilterParameters, self).__init__(**kwargs) self.continuation_token = kwargs.get('continuation_token', None) self.parent_trigger_name = kwargs.get('parent_trigger_name', None) @@ -36216,10 +50327,10 @@ class TriggerListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of triggers. 
- :type value: list[~azure.mgmt.datafactory.models.TriggerResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of triggers. + :vartype value: list[~azure.mgmt.datafactory.models.TriggerResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -36235,6 +50346,12 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. List of triggers. + :paramtype value: list[~azure.mgmt.datafactory.models.TriggerResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(TriggerListResponse, self).__init__(**kwargs) self.value = kwargs['value'] self.next_link = kwargs.get('next_link', None) @@ -36243,10 +50360,10 @@ def __init__( class TriggerPipelineReference(msrest.serialization.Model): """Pipeline that needs to be triggered with the given parameters. - :param pipeline_reference: Pipeline reference. - :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference - :param parameters: Pipeline parameters. - :type parameters: dict[str, any] + :ivar pipeline_reference: Pipeline reference. + :vartype pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference + :ivar parameters: Pipeline parameters. + :vartype parameters: dict[str, any] """ _attribute_map = { @@ -36258,6 +50375,12 @@ def __init__( self, **kwargs ): + """ + :keyword pipeline_reference: Pipeline reference. + :paramtype pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference + :keyword parameters: Pipeline parameters. 
+ :paramtype parameters: dict[str, any] + """ super(TriggerPipelineReference, self).__init__(**kwargs) self.pipeline_reference = kwargs.get('pipeline_reference', None) self.parameters = kwargs.get('parameters', None) @@ -36268,11 +50391,11 @@ class TriggerQueryResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of triggers. - :type value: list[~azure.mgmt.datafactory.models.TriggerResource] - :param continuation_token: The continuation token for getting the next page of results, if any + :ivar value: Required. List of triggers. + :vartype value: list[~azure.mgmt.datafactory.models.TriggerResource] + :ivar continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. - :type continuation_token: str + :vartype continuation_token: str """ _validation = { @@ -36288,6 +50411,13 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. List of triggers. + :paramtype value: list[~azure.mgmt.datafactory.models.TriggerResource] + :keyword continuation_token: The continuation token for getting the next page of results, if + any remaining results exist, null otherwise. + :paramtype continuation_token: str + """ super(TriggerQueryResponse, self).__init__(**kwargs) self.value = kwargs['value'] self.continuation_token = kwargs.get('continuation_token', None) @@ -36302,8 +50432,8 @@ class TriggerReference(msrest.serialization.Model): :ivar type: Trigger reference type. Has constant value: "TriggerReference". :vartype type: str - :param reference_name: Required. Reference trigger name. - :type reference_name: str + :ivar reference_name: Required. Reference trigger name. + :vartype reference_name: str """ _validation = { @@ -36322,6 +50452,10 @@ def __init__( self, **kwargs ): + """ + :keyword reference_name: Required. Reference trigger name. 
+ :paramtype reference_name: str + """ super(TriggerReference, self).__init__(**kwargs) self.reference_name = kwargs['reference_name'] @@ -36341,8 +50475,8 @@ class TriggerResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Properties of the trigger. - :type properties: ~azure.mgmt.datafactory.models.Trigger + :ivar properties: Required. Properties of the trigger. + :vartype properties: ~azure.mgmt.datafactory.models.Trigger """ _validation = { @@ -36365,6 +50499,10 @@ def __init__( self, **kwargs ): + """ + :keyword properties: Required. Properties of the trigger. + :paramtype properties: ~azure.mgmt.datafactory.models.Trigger + """ super(TriggerResource, self).__init__(**kwargs) self.properties = kwargs['properties'] @@ -36374,9 +50512,9 @@ class TriggerRun(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar trigger_run_id: Trigger run id. :vartype trigger_run_id: str :ivar trigger_name: Trigger name. @@ -36431,6 +50569,11 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(TriggerRun, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.trigger_run_id = None @@ -36450,11 +50593,11 @@ class TriggerRunsQueryResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of trigger runs. 
- :type value: list[~azure.mgmt.datafactory.models.TriggerRun] - :param continuation_token: The continuation token for getting the next page of results, if any + :ivar value: Required. List of trigger runs. + :vartype value: list[~azure.mgmt.datafactory.models.TriggerRun] + :ivar continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. - :type continuation_token: str + :vartype continuation_token: str """ _validation = { @@ -36470,6 +50613,13 @@ def __init__( self, **kwargs ): + """ + :keyword value: Required. List of trigger runs. + :paramtype value: list[~azure.mgmt.datafactory.models.TriggerRun] + :keyword continuation_token: The continuation token for getting the next page of results, if + any remaining results exist, null otherwise. + :paramtype continuation_token: str + """ super(TriggerRunsQueryResponse, self).__init__(**kwargs) self.value = kwargs['value'] self.continuation_token = kwargs.get('continuation_token', None) @@ -36501,6 +50651,8 @@ def __init__( self, **kwargs ): + """ + """ super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) self.trigger_name = None self.status = None @@ -36513,45 +50665,45 @@ class TumblingWindowTrigger(Trigger): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Trigger type.Constant filled by server. + :vartype type: str + :ivar description: Trigger description. + :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. 
Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[any] - :param pipeline: Required. Pipeline for which runs are created when an event is fired for + :ivar annotations: List of tags that can be used for describing the trigger. + :vartype annotations: list[any] + :ivar pipeline: Required. Pipeline for which runs are created when an event is fired for trigger window that is ready. - :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference - :param frequency: Required. The frequency of the time windows. Possible values include: + :vartype pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference + :ivar frequency: Required. The frequency of the time windows. Possible values include: "Minute", "Hour", "Month". - :type frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency - :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 + :vartype frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency + :ivar interval: Required. The interval of the time windows. The minimum interval allowed is 15 Minutes. - :type interval: int - :param start_time: Required. The start time for the time period for the trigger during which + :vartype interval: int + :ivar start_time: Required. The start time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is currently supported. - :type start_time: ~datetime.datetime - :param end_time: The end time for the time period for the trigger during which events are fired + :vartype start_time: ~datetime.datetime + :ivar end_time: The end time for the time period for the trigger during which events are fired for windows that are ready. 
Only UTC time is currently supported. - :type end_time: ~datetime.datetime - :param delay: Specifies how long the trigger waits past due time before triggering new run. It + :vartype end_time: ~datetime.datetime + :ivar delay: Specifies how long the trigger waits past due time before triggering new run. It doesn't alter window start and end time. The default is 0. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type delay: any - :param max_concurrency: Required. The max number of parallel time windows (ready for execution) + :vartype delay: any + :ivar max_concurrency: Required. The max number of parallel time windows (ready for execution) for which a new run is triggered. - :type max_concurrency: int - :param retry_policy: Retry policy that will be applied for failed pipeline runs. - :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy - :param depends_on: Triggers that this trigger depends on. Only tumbling window triggers are + :vartype max_concurrency: int + :ivar retry_policy: Retry policy that will be applied for failed pipeline runs. + :vartype retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy + :ivar depends_on: Triggers that this trigger depends on. Only tumbling window triggers are supported. - :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] + :vartype depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] """ _validation = { @@ -36585,6 +50737,42 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Trigger description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the trigger. + :paramtype annotations: list[any] + :keyword pipeline: Required. 
Pipeline for which runs are created when an event is fired for + trigger window that is ready. + :paramtype pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference + :keyword frequency: Required. The frequency of the time windows. Possible values include: + "Minute", "Hour", "Month". + :paramtype frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency + :keyword interval: Required. The interval of the time windows. The minimum interval allowed is + 15 Minutes. + :paramtype interval: int + :keyword start_time: Required. The start time for the time period for the trigger during which + events are fired for windows that are ready. Only UTC time is currently supported. + :paramtype start_time: ~datetime.datetime + :keyword end_time: The end time for the time period for the trigger during which events are + fired for windows that are ready. Only UTC time is currently supported. + :paramtype end_time: ~datetime.datetime + :keyword delay: Specifies how long the trigger waits past due time before triggering new run. + It doesn't alter window start and end time. The default is 0. Type: string (or Expression with + resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype delay: any + :keyword max_concurrency: Required. The max number of parallel time windows (ready for + execution) for which a new run is triggered. + :paramtype max_concurrency: int + :keyword retry_policy: Retry policy that will be applied for failed pipeline runs. + :paramtype retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy + :keyword depends_on: Triggers that this trigger depends on. Only tumbling window triggers are + supported. 
+ :paramtype depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] + """ super(TumblingWindowTrigger, self).__init__(**kwargs) self.type = 'TumblingWindowTrigger' # type: str self.pipeline = kwargs['pipeline'] @@ -36603,16 +50791,16 @@ class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): All required parameters must be populated in order to send to Azure. - :param type: Required. The type of dependency reference.Constant filled by server. - :type type: str - :param reference_trigger: Required. Referenced trigger. - :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference - :param offset: Timespan applied to the start time of a tumbling window when evaluating + :ivar type: Required. The type of dependency reference.Constant filled by server. + :vartype type: str + :ivar reference_trigger: Required. Referenced trigger. + :vartype reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + :ivar offset: Timespan applied to the start time of a tumbling window when evaluating dependency. - :type offset: str - :param size: The size of the window when evaluating the dependency. If undefined the frequency + :vartype offset: str + :ivar size: The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used. - :type size: str + :vartype size: str """ _validation = { @@ -36633,6 +50821,16 @@ def __init__( self, **kwargs ): + """ + :keyword reference_trigger: Required. Referenced trigger. + :paramtype reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + :keyword offset: Timespan applied to the start time of a tumbling window when evaluating + dependency. + :paramtype offset: str + :keyword size: The size of the window when evaluating the dependency. If undefined the + frequency of the tumbling window will be used. 
+ :paramtype size: str + """ super(TumblingWindowTriggerDependencyReference, self).__init__(**kwargs) self.type = 'TumblingWindowTriggerDependencyReference' # type: str self.offset = kwargs.get('offset', None) @@ -36642,24 +50840,24 @@ def __init__( class TypeConversionSettings(msrest.serialization.Model): """Type conversion settings. - :param allow_data_truncation: Whether to allow data truncation when converting the data. Type: + :ivar allow_data_truncation: Whether to allow data truncation when converting the data. Type: boolean (or Expression with resultType boolean). - :type allow_data_truncation: any - :param treat_boolean_as_number: Whether to treat boolean values as numbers. Type: boolean (or + :vartype allow_data_truncation: any + :ivar treat_boolean_as_number: Whether to treat boolean values as numbers. Type: boolean (or Expression with resultType boolean). - :type treat_boolean_as_number: any - :param date_time_format: The format for DateTime values. Type: string (or Expression with + :vartype treat_boolean_as_number: any + :ivar date_time_format: The format for DateTime values. Type: string (or Expression with resultType string). - :type date_time_format: any - :param date_time_offset_format: The format for DateTimeOffset values. Type: string (or + :vartype date_time_format: any + :ivar date_time_offset_format: The format for DateTimeOffset values. Type: string (or Expression with resultType string). - :type date_time_offset_format: any - :param time_span_format: The format for TimeSpan values. Type: string (or Expression with + :vartype date_time_offset_format: any + :ivar time_span_format: The format for TimeSpan values. Type: string (or Expression with resultType string). - :type time_span_format: any - :param culture: The culture used to convert data from/to string. Type: string (or Expression + :vartype time_span_format: any + :ivar culture: The culture used to convert data from/to string. Type: string (or Expression with resultType string). 
- :type culture: any + :vartype culture: any """ _attribute_map = { @@ -36675,6 +50873,26 @@ def __init__( self, **kwargs ): + """ + :keyword allow_data_truncation: Whether to allow data truncation when converting the data. + Type: boolean (or Expression with resultType boolean). + :paramtype allow_data_truncation: any + :keyword treat_boolean_as_number: Whether to treat boolean values as numbers. Type: boolean (or + Expression with resultType boolean). + :paramtype treat_boolean_as_number: any + :keyword date_time_format: The format for DateTime values. Type: string (or Expression with + resultType string). + :paramtype date_time_format: any + :keyword date_time_offset_format: The format for DateTimeOffset values. Type: string (or + Expression with resultType string). + :paramtype date_time_offset_format: any + :keyword time_span_format: The format for TimeSpan values. Type: string (or Expression with + resultType string). + :paramtype time_span_format: any + :keyword culture: The culture used to convert data from/to string. Type: string (or Expression + with resultType string). + :paramtype culture: any + """ super(TypeConversionSettings, self).__init__(**kwargs) self.allow_data_truncation = kwargs.get('allow_data_truncation', None) self.treat_boolean_as_number = kwargs.get('treat_boolean_as_number', None) @@ -36689,30 +50907,30 @@ class UntilActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. 
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param expression: Required. An expression that would evaluate to Boolean. The loop will + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar expression: Required. An expression that would evaluate to Boolean. The loop will continue until this expression evaluates to true. - :type expression: ~azure.mgmt.datafactory.models.Expression - :param timeout: Specifies the timeout for the activity to run. If there is no value specified, + :vartype expression: ~azure.mgmt.datafactory.models.Expression + :ivar timeout: Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: any - :param activities: Required. List of activities to execute. - :type activities: list[~azure.mgmt.datafactory.models.Activity] + :vartype timeout: any + :ivar activities: Required. List of activities to execute. 
+ :vartype activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { @@ -36738,6 +50956,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword expression: Required. An expression that would evaluate to Boolean. The loop will + continue until this expression evaluates to true. + :paramtype expression: ~azure.mgmt.datafactory.models.Expression + :keyword timeout: Specifies the timeout for the activity to run. If there is no value + specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). Type: string (or Expression with + resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype timeout: any + :keyword activities: Required. List of activities to execute. + :paramtype activities: list[~azure.mgmt.datafactory.models.Activity] + """ super(UntilActivity, self).__init__(**kwargs) self.type = 'Until' # type: str self.expression = kwargs['expression'] @@ -36748,9 +50990,9 @@ def __init__( class UpdateIntegrationRuntimeNodeRequest(msrest.serialization.Model): """Update integration runtime node request. - :param concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration + :ivar concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration runtime node. 
Values between 1 and maxConcurrentJobs(inclusive) are allowed. - :type concurrent_jobs_limit: int + :vartype concurrent_jobs_limit: int """ _validation = { @@ -36765,6 +51007,11 @@ def __init__( self, **kwargs ): + """ + :keyword concurrent_jobs_limit: The number of concurrent jobs permitted to run on the + integration runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed. + :paramtype concurrent_jobs_limit: int + """ super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs) self.concurrent_jobs_limit = kwargs.get('concurrent_jobs_limit', None) @@ -36772,13 +51019,13 @@ def __init__( class UpdateIntegrationRuntimeRequest(msrest.serialization.Model): """Update integration runtime request. - :param auto_update: Enables or disables the auto-update feature of the self-hosted integration + :ivar auto_update: Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189. Possible values include: "On", "Off". - :type auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate - :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The + :vartype auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate + :ivar update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time. - :type update_delay_offset: str + :vartype update_delay_offset: str """ _attribute_map = { @@ -36790,6 +51037,15 @@ def __init__( self, **kwargs ): + """ + :keyword auto_update: Enables or disables the auto-update feature of the self-hosted + integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189. Possible values + include: "On", "Off". + :paramtype auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate + :keyword update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. 
+ The integration runtime auto update will happen on that time. + :paramtype update_delay_offset: str + """ super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs) self.auto_update = kwargs.get('auto_update', None) self.update_delay_offset = kwargs.get('update_delay_offset', None) @@ -36798,20 +51054,20 @@ def __init__( class UserAccessPolicy(msrest.serialization.Model): """Get Data Plane read only token request definition. - :param permissions: The string with permissions for Data Plane access. Currently only 'r' is + :ivar permissions: The string with permissions for Data Plane access. Currently only 'r' is supported which grants read only access. - :type permissions: str - :param access_resource_path: The resource path to get access relative to factory. Currently - only empty string is supported which corresponds to the factory resource. - :type access_resource_path: str - :param profile_name: The name of the profile. Currently only the default is supported. The + :vartype permissions: str + :ivar access_resource_path: The resource path to get access relative to factory. Currently only + empty string is supported which corresponds to the factory resource. + :vartype access_resource_path: str + :ivar profile_name: The name of the profile. Currently only the default is supported. The default value is DefaultProfile. - :type profile_name: str - :param start_time: Start time for the token. If not specified the current time will be used. - :type start_time: str - :param expire_time: Expiration time for the token. Maximum duration for the token is eight - hours and by default the token will expire in eight hours. - :type expire_time: str + :vartype profile_name: str + :ivar start_time: Start time for the token. If not specified the current time will be used. + :vartype start_time: str + :ivar expire_time: Expiration time for the token. Maximum duration for the token is eight hours + and by default the token will expire in eight hours. 
+ :vartype expire_time: str """ _attribute_map = { @@ -36826,6 +51082,22 @@ def __init__( self, **kwargs ): + """ + :keyword permissions: The string with permissions for Data Plane access. Currently only 'r' is + supported which grants read only access. + :paramtype permissions: str + :keyword access_resource_path: The resource path to get access relative to factory. Currently + only empty string is supported which corresponds to the factory resource. + :paramtype access_resource_path: str + :keyword profile_name: The name of the profile. Currently only the default is supported. The + default value is DefaultProfile. + :paramtype profile_name: str + :keyword start_time: Start time for the token. If not specified the current time will be used. + :paramtype start_time: str + :keyword expire_time: Expiration time for the token. Maximum duration for the token is eight + hours and by default the token will expire in eight hours. + :paramtype expire_time: str + """ super(UserAccessPolicy, self).__init__(**kwargs) self.permissions = kwargs.get('permissions', None) self.access_resource_path = kwargs.get('access_resource_path', None) @@ -36839,11 +51111,11 @@ class UserProperty(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param name: Required. User property name. - :type name: str - :param value: Required. User property value. Type: string (or Expression with resultType + :ivar name: Required. User property name. + :vartype name: str + :ivar value: Required. User property value. Type: string (or Expression with resultType string). - :type value: any + :vartype value: any """ _validation = { @@ -36860,6 +51132,13 @@ def __init__( self, **kwargs ): + """ + :keyword name: Required. User property name. + :paramtype name: str + :keyword value: Required. User property value. Type: string (or Expression with resultType + string). 
+ :paramtype value: any + """ super(UserProperty, self).__init__(**kwargs) self.name = kwargs['name'] self.value = kwargs['value'] @@ -36870,36 +51149,36 @@ class ValidationActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param timeout: Specifies the timeout for the activity to run. If there is no value specified, + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar timeout: Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: any - :param sleep: A delay in seconds between validation attempts. 
If no value is specified, 10 + :vartype timeout: any + :ivar sleep: A delay in seconds between validation attempts. If no value is specified, 10 seconds will be used as the default. Type: integer (or Expression with resultType integer). - :type sleep: any - :param minimum_size: Can be used if dataset points to a file. The file must be greater than or + :vartype sleep: any + :ivar minimum_size: Can be used if dataset points to a file. The file must be greater than or equal in size to the value specified. Type: integer (or Expression with resultType integer). - :type minimum_size: any - :param child_items: Can be used if dataset points to a folder. If set to true, the folder must + :vartype minimum_size: any + :ivar child_items: Can be used if dataset points to a folder. If set to true, the folder must have at least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). - :type child_items: any - :param dataset: Required. Validation activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + :vartype child_items: any + :ivar dataset: Required. Validation activity dataset reference. + :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference """ _validation = { @@ -36926,6 +51205,36 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword timeout: Specifies the timeout for the activity to run. 
If there is no value + specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype timeout: any + :keyword sleep: A delay in seconds between validation attempts. If no value is specified, 10 + seconds will be used as the default. Type: integer (or Expression with resultType integer). + :paramtype sleep: any + :keyword minimum_size: Can be used if dataset points to a file. The file must be greater than + or equal in size to the value specified. Type: integer (or Expression with resultType integer). + :paramtype minimum_size: any + :keyword child_items: Can be used if dataset points to a folder. If set to true, the folder + must have at least one file. If set to false, the folder must be empty. Type: boolean (or + Expression with resultType boolean). + :paramtype child_items: any + :keyword dataset: Required. Validation activity dataset reference. + :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference + """ super(ValidationActivity, self).__init__(**kwargs) self.type = 'Validation' # type: str self.timeout = kwargs.get('timeout', None) @@ -36940,10 +51249,10 @@ class VariableSpecification(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Variable type. Possible values include: "String", "Bool", "Array". - :type type: str or ~azure.mgmt.datafactory.models.VariableType - :param default_value: Default value of variable. - :type default_value: any + :ivar type: Required. Variable type. Possible values include: "String", "Bool", "Array". + :vartype type: str or ~azure.mgmt.datafactory.models.VariableType + :ivar default_value: Default value of variable. + :vartype default_value: any """ _validation = { @@ -36959,6 +51268,12 @@ def __init__( self, **kwargs ): + """ + :keyword type: Required. Variable type. 
Possible values include: "String", "Bool", "Array". + :paramtype type: str or ~azure.mgmt.datafactory.models.VariableType + :keyword default_value: Default value of variable. + :paramtype default_value: any + """ super(VariableSpecification, self).__init__(**kwargs) self.type = kwargs['type'] self.default_value = kwargs.get('default_value', None) @@ -36969,28 +51284,28 @@ class VerticaLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: An ODBC connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. 
+ :vartype annotations: list[any] + :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar pwd: The Azure key vault secret reference of password in connection string. + :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -37013,6 +51328,28 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword pwd: The Azure key vault secret reference of password in connection string. 
+ :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(VerticaLinkedService, self).__init__(**kwargs) self.type = 'Vertica' # type: str self.connection_string = kwargs.get('connection_string', None) @@ -37025,32 +51362,32 @@ class VerticaSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -37073,6 +51410,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(VerticaSource, self).__init__(**kwargs) self.type = 'VerticaSource' # type: str self.query = kwargs.get('query', None) @@ -37083,37 +51446,37 @@ class VerticaTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. 
+ :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The table name of the Vertica. 
Type: string (or Expression with resultType + :vartype table_name: any + :ivar table: The table name of the Vertica. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The schema name of the Vertica. Type: string (or + :vartype table: any + :ivar schema_type_properties_schema: The schema name of the Vertica. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -37140,6 +51503,37 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of the Vertica. 
Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of the Vertica. Type: string (or + Expression with resultType string). + :paramtype schema_type_properties_schema: any + """ super(VerticaTableDataset, self).__init__(**kwargs) self.type = 'VerticaTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -37152,21 +51546,21 @@ class WaitActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param wait_time_in_seconds: Required. Duration in seconds. - :type wait_time_in_seconds: any + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar wait_time_in_seconds: Required. Duration in seconds. 
+ :vartype wait_time_in_seconds: any """ _validation = { @@ -37189,6 +51583,21 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword wait_time_in_seconds: Required. Duration in seconds. + :paramtype wait_time_in_seconds: any + """ super(WaitActivity, self).__init__(**kwargs) self.type = 'Wait' # type: str self.wait_time_in_seconds = kwargs['wait_time_in_seconds'] @@ -37199,44 +51608,44 @@ class WebActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param method: Required. Rest API method for target endpoint. 
Possible values include: "GET", + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar method: Required. Rest API method for target endpoint. Possible values include: "GET", "POST", "PUT", "DELETE". - :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod - :param url: Required. Web activity target endpoint and path. Type: string (or Expression with + :vartype method: str or ~azure.mgmt.datafactory.models.WebActivityMethod + :ivar url: Required. Web activity target endpoint and path. Type: string (or Expression with resultType string). - :type url: any - :param headers: Represents the headers that will be sent to the request. For example, to set - the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": + :vartype url: any + :ivar headers: Represents the headers that will be sent to the request. For example, to set the + language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :type headers: any - :param body: Represents the payload that will be sent to the endpoint. 
Required for POST/PUT + :vartype headers: any + :ivar body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). - :type body: any - :param authentication: Authentication method used for calling the endpoint. - :type authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication - :param datasets: List of datasets passed to web endpoint. - :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] - :param linked_services: List of linked services passed to web endpoint. - :type linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :vartype body: any + :ivar authentication: Authentication method used for calling the endpoint. + :vartype authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication + :ivar datasets: List of datasets passed to web endpoint. + :vartype datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + :ivar linked_services: List of linked services passed to web endpoint. + :vartype linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference """ _validation = { @@ -37269,6 +51678,44 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. 
+ :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword method: Required. Rest API method for target endpoint. Possible values include: "GET", + "POST", "PUT", "DELETE". + :paramtype method: str or ~azure.mgmt.datafactory.models.WebActivityMethod + :keyword url: Required. Web activity target endpoint and path. Type: string (or Expression with + resultType string). + :paramtype url: any + :keyword headers: Represents the headers that will be sent to the request. For example, to set + the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": + "application/json" }. Type: string (or Expression with resultType string). + :paramtype headers: any + :keyword body: Represents the payload that will be sent to the endpoint. Required for POST/PUT + method, not allowed for GET method Type: string (or Expression with resultType string). + :paramtype body: any + :keyword authentication: Authentication method used for calling the endpoint. + :paramtype authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication + :keyword datasets: List of datasets passed to web endpoint. + :paramtype datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + :keyword linked_services: List of linked services passed to web endpoint. + :paramtype linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + """ super(WebActivity, self).__init__(**kwargs) self.type = 'WebActivity' # type: str self.method = kwargs['method'] @@ -37284,25 +51731,24 @@ def __init__( class WebActivityAuthentication(msrest.serialization.Model): """Web activity authentication properties. - :param type: Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal). - :type type: str - :param pfx: Base64-encoded contents of a PFX file or Certificate when used for - ServicePrincipal. - :type pfx: ~azure.mgmt.datafactory.models.SecretBase - :param username: Web activity authentication user name for basic authentication or ClientID - when used for ServicePrincipal. Type: string (or Expression with resultType string). - :type username: any - :param password: Password for the PFX file or basic authentication / Secret when used for + :ivar type: Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal). + :vartype type: str + :ivar pfx: Base64-encoded contents of a PFX file or Certificate when used for ServicePrincipal. + :vartype pfx: ~azure.mgmt.datafactory.models.SecretBase + :ivar username: Web activity authentication user name for basic authentication or ClientID when + used for ServicePrincipal. Type: string (or Expression with resultType string). + :vartype username: any + :ivar password: Password for the PFX file or basic authentication / Secret when used for ServicePrincipal. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param resource: Resource for which Azure Auth token will be requested when using MSI + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar resource: Resource for which Azure Auth token will be requested when using MSI Authentication. Type: string (or Expression with resultType string). 
- :type resource: any - :param user_tenant: TenantId for which Azure Auth token will be requested when using + :vartype resource: any + :ivar user_tenant: TenantId for which Azure Auth token will be requested when using ServicePrincipal Authentication. Type: string (or Expression with resultType string). - :type user_tenant: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype user_tenant: any + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _attribute_map = { @@ -37319,6 +51765,27 @@ def __init__( self, **kwargs ): + """ + :keyword type: Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal). + :paramtype type: str + :keyword pfx: Base64-encoded contents of a PFX file or Certificate when used for + ServicePrincipal. + :paramtype pfx: ~azure.mgmt.datafactory.models.SecretBase + :keyword username: Web activity authentication user name for basic authentication or ClientID + when used for ServicePrincipal. Type: string (or Expression with resultType string). + :paramtype username: any + :keyword password: Password for the PFX file or basic authentication / Secret when used for + ServicePrincipal. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword resource: Resource for which Azure Auth token will be requested when using MSI + Authentication. Type: string (or Expression with resultType string). + :paramtype resource: any + :keyword user_tenant: TenantId for which Azure Auth token will be requested when using + ServicePrincipal Authentication. Type: string (or Expression with resultType string). + :paramtype user_tenant: any + :keyword credential: The credential reference containing authentication information. 
+ :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(WebActivityAuthentication, self).__init__(**kwargs) self.type = kwargs.get('type', None) self.pfx = kwargs.get('pfx', None) @@ -37337,13 +51804,13 @@ class WebLinkedServiceTypeProperties(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . - Type: string (or Expression with resultType string). - :type url: any - :param authentication_type: Required. Type of authentication used to connect to the web table - source.Constant filled by server. Possible values include: "Basic", "Anonymous", + :ivar url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: + string (or Expression with resultType string). + :vartype url: any + :ivar authentication_type: Required. Type of authentication used to connect to the web table + source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". - :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType """ _validation = { @@ -37364,6 +51831,11 @@ def __init__( self, **kwargs ): + """ + :keyword url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . + Type: string (or Expression with resultType string). + :paramtype url: any + """ super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) self.url = kwargs['url'] self.authentication_type = None # type: Optional[str] @@ -37374,13 +51846,13 @@ class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): All required parameters must be populated in order to send to Azure. - :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . - Type: string (or Expression with resultType string). 
- :type url: any - :param authentication_type: Required. Type of authentication used to connect to the web table - source.Constant filled by server. Possible values include: "Basic", "Anonymous", + :ivar url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: + string (or Expression with resultType string). + :vartype url: any + :ivar authentication_type: Required. Type of authentication used to connect to the web table + source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". - :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType """ _validation = { @@ -37397,6 +51869,11 @@ def __init__( self, **kwargs ): + """ + :keyword url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . + Type: string (or Expression with resultType string). + :paramtype url: any + """ super(WebAnonymousAuthentication, self).__init__(**kwargs) self.authentication_type = 'Anonymous' # type: str @@ -37406,18 +51883,18 @@ class WebBasicAuthentication(WebLinkedServiceTypeProperties): All required parameters must be populated in order to send to Azure. - :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . - Type: string (or Expression with resultType string). - :type url: any - :param authentication_type: Required. Type of authentication used to connect to the web table - source.Constant filled by server. Possible values include: "Basic", "Anonymous", + :ivar url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: + string (or Expression with resultType string). + :vartype url: any + :ivar authentication_type: Required. Type of authentication used to connect to the web table + source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". 
- :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType - :param username: Required. User name for Basic authentication. Type: string (or Expression with + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType + :ivar username: Required. User name for Basic authentication. Type: string (or Expression with resultType string). - :type username: any - :param password: Required. The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase + :vartype username: any + :ivar password: Required. The password for Basic authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -37438,6 +51915,16 @@ def __init__( self, **kwargs ): + """ + :keyword url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . + Type: string (or Expression with resultType string). + :paramtype url: any + :keyword username: Required. User name for Basic authentication. Type: string (or Expression + with resultType string). + :paramtype username: any + :keyword password: Required. The password for Basic authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + """ super(WebBasicAuthentication, self).__init__(**kwargs) self.authentication_type = 'Basic' # type: str self.username = kwargs['username'] @@ -37449,17 +51936,17 @@ class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): All required parameters must be populated in order to send to Azure. - :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . - Type: string (or Expression with resultType string). - :type url: any - :param authentication_type: Required. Type of authentication used to connect to the web table - source.Constant filled by server. Possible values include: "Basic", "Anonymous", + :ivar url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . 
Type: + string (or Expression with resultType string). + :vartype url: any + :ivar authentication_type: Required. Type of authentication used to connect to the web table + source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". - :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType - :param pfx: Required. Base64-encoded contents of a PFX file. - :type pfx: ~azure.mgmt.datafactory.models.SecretBase - :param password: Required. Password for the PFX file. - :type password: ~azure.mgmt.datafactory.models.SecretBase + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType + :ivar pfx: Required. Base64-encoded contents of a PFX file. + :vartype pfx: ~azure.mgmt.datafactory.models.SecretBase + :ivar password: Required. Password for the PFX file. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -37480,6 +51967,15 @@ def __init__( self, **kwargs ): + """ + :keyword url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . + Type: string (or Expression with resultType string). + :paramtype url: any + :keyword pfx: Required. Base64-encoded contents of a PFX file. + :paramtype pfx: ~azure.mgmt.datafactory.models.SecretBase + :keyword password: Required. Password for the PFX file. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + """ super(WebClientCertificateAuthentication, self).__init__(**kwargs) self.authentication_type = 'ClientCertificate' # type: str self.pfx = kwargs['pfx'] @@ -37491,42 +51987,42 @@ class WebHookActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. 
Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param method: Required. Rest API method for target endpoint. Possible values include: "POST". - :type method: str or ~azure.mgmt.datafactory.models.WebHookActivityMethod - :param url: Required. WebHook activity target endpoint and path. Type: string (or Expression + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar method: Required. Rest API method for target endpoint. Possible values include: "POST". + :vartype method: str or ~azure.mgmt.datafactory.models.WebHookActivityMethod + :ivar url: Required. WebHook activity target endpoint and path. Type: string (or Expression with resultType string). - :type url: any - :param timeout: The timeout within which the webhook should be called back. If there is no - value specified, it defaults to 10 minutes. Type: string. Pattern: + :vartype url: any + :ivar timeout: The timeout within which the webhook should be called back. If there is no value + specified, it defaults to 10 minutes. Type: string. Pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type timeout: str - :param headers: Represents the headers that will be sent to the request. For example, to set - the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": + :vartype timeout: str + :ivar headers: Represents the headers that will be sent to the request. For example, to set the + language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :type headers: any - :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT + :vartype headers: any + :ivar body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). - :type body: any - :param authentication: Authentication method used for calling the endpoint. - :type authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication - :param report_status_on_call_back: When set to true, statusCode, output and error in callback + :vartype body: any + :ivar authentication: Authentication method used for calling the endpoint. + :vartype authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication + :ivar report_status_on_call_back: When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with resultType boolean). - :type report_status_on_call_back: any + :vartype report_status_on_call_back: any """ _validation = { @@ -37556,6 +52052,43 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. 
+ :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword method: Required. Rest API method for target endpoint. Possible values include: + "POST". + :paramtype method: str or ~azure.mgmt.datafactory.models.WebHookActivityMethod + :keyword url: Required. WebHook activity target endpoint and path. Type: string (or Expression + with resultType string). + :paramtype url: any + :keyword timeout: The timeout within which the webhook should be called back. If there is no + value specified, it defaults to 10 minutes. Type: string. Pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype timeout: str + :keyword headers: Represents the headers that will be sent to the request. For example, to set + the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": + "application/json" }. Type: string (or Expression with resultType string). + :paramtype headers: any + :keyword body: Represents the payload that will be sent to the endpoint. Required for POST/PUT + method, not allowed for GET method Type: string (or Expression with resultType string). + :paramtype body: any + :keyword authentication: Authentication method used for calling the endpoint. + :paramtype authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication + :keyword report_status_on_call_back: When set to true, statusCode, output and error in callback + request body will be consumed by activity. The activity can be marked as failed by setting + statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with + resultType boolean). 
+ :paramtype report_status_on_call_back: any + """ super(WebHookActivity, self).__init__(**kwargs) self.type = 'WebHook' # type: str self.method = kwargs['method'] @@ -37572,21 +52105,21 @@ class WebLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param type_properties: Required. Web linked service properties. - :type type_properties: ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar type_properties: Required. Web linked service properties. 
+ :vartype type_properties: ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties """ _validation = { @@ -37608,6 +52141,21 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword type_properties: Required. Web linked service properties. + :paramtype type_properties: ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties + """ super(WebLinkedService, self).__init__(**kwargs) self.type = 'Web' # type: str self.type_properties = kwargs['type_properties'] @@ -37618,26 +52166,26 @@ class WebSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -37658,6 +52206,26 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(WebSource, self).__init__(**kwargs) self.type = 'WebSource' # type: str self.additional_columns = kwargs.get('additional_columns', None) @@ -37668,34 +52236,34 @@ class WebTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param index: Required. The zero-based index of the table in the web page. Type: integer (or + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar index: Required. The zero-based index of the table in the web page. Type: integer (or Expression with resultType integer), minimum: 0. - :type index: any - :param path: The relative URL to the web page from the linked service URL. Type: string (or + :vartype index: any + :ivar path: The relative URL to the web page from the linked service URL. Type: string (or Expression with resultType string). 
- :type path: any + :vartype path: any """ _validation = { @@ -37722,6 +52290,34 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword index: Required. The zero-based index of the table in the web page. Type: integer (or + Expression with resultType integer), minimum: 0. + :paramtype index: any + :keyword path: The relative URL to the web page from the linked service URL. Type: string (or + Expression with resultType string). + :paramtype path: any + """ super(WebTableDataset, self).__init__(**kwargs) self.type = 'WebTable' # type: str self.index = kwargs['index'] @@ -37733,21 +52329,21 @@ class WranglingDataFlow(DataFlow): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of data flow.Constant filled by server. 
- :type type: str - :param description: The description of the data flow. - :type description: str - :param annotations: List of tags that can be used for describing the data flow. - :type annotations: list[any] - :param folder: The folder that this data flow is in. If not specified, Data flow will appear at + :ivar type: Required. Type of data flow.Constant filled by server. + :vartype type: str + :ivar description: The description of the data flow. + :vartype description: str + :ivar annotations: List of tags that can be used for describing the data flow. + :vartype annotations: list[any] + :ivar folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder - :param sources: List of sources in Power Query. - :type sources: list[~azure.mgmt.datafactory.models.PowerQuerySource] - :param script: Power query mashup script. - :type script: str - :param document_locale: Locale of the Power query mashup document. - :type document_locale: str + :vartype folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :ivar sources: List of sources in Power Query. + :vartype sources: list[~azure.mgmt.datafactory.models.PowerQuerySource] + :ivar script: Power query mashup script. + :vartype script: str + :ivar document_locale: Locale of the Power query mashup document. + :vartype document_locale: str """ _validation = { @@ -37768,6 +52364,21 @@ def __init__( self, **kwargs ): + """ + :keyword description: The description of the data flow. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the data flow. + :paramtype annotations: list[any] + :keyword folder: The folder that this data flow is in. If not specified, Data flow will appear + at the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :keyword sources: List of sources in Power Query. 
+ :paramtype sources: list[~azure.mgmt.datafactory.models.PowerQuerySource] + :keyword script: Power query mashup script. + :paramtype script: str + :keyword document_locale: Locale of the Power query mashup document. + :paramtype document_locale: str + """ super(WranglingDataFlow, self).__init__(**kwargs) self.type = 'WranglingDataFlow' # type: str self.sources = kwargs.get('sources', None) @@ -37780,44 +52391,44 @@ class XeroLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_properties: Properties used to connect to Xero. It is mutually exclusive with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_properties: Properties used to connect to Xero. It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: any - :param host: The endpoint of the Xero server. (i.e. api.xero.com). - :type host: any - :param consumer_key: The consumer key associated with the Xero application. - :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase - :param private_key: The private key from the .pem file that was generated for your Xero private + :vartype connection_properties: any + :ivar host: The endpoint of the Xero server. (i.e. api.xero.com). + :vartype host: any + :ivar consumer_key: The consumer key associated with the Xero application. + :vartype consumer_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar private_key: The private key from the .pem file that was generated for your Xero private application. You must include all the text from the .pem file, including the Unix line endings( ). - :type private_key: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype private_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
- :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -37845,6 +52456,45 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_properties: Properties used to connect to Xero. It is mutually exclusive + with any other properties in the linked service. Type: object. + :paramtype connection_properties: any + :keyword host: The endpoint of the Xero server. (i.e. api.xero.com). + :paramtype host: any + :keyword consumer_key: The consumer key associated with the Xero application. 
+ :paramtype consumer_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword private_key: The private key from the .pem file that was generated for your Xero + private application. You must include all the text from the .pem file, including the Unix line + endings( + ). + :paramtype private_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(XeroLinkedService, self).__init__(**kwargs) self.type = 'Xero' # type: str self.connection_properties = kwargs.get('connection_properties', None) @@ -37862,30 +52512,30 @@ class XeroObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -37910,6 +52560,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(XeroObjectDataset, self).__init__(**kwargs) self.type = 'XeroObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -37920,32 +52594,32 @@ class XeroSource(TabularSource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -37968,6 +52642,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(XeroSource, self).__init__(**kwargs) self.type = 'XeroSource' # type: str self.query = kwargs.get('query', None) @@ -37978,40 +52678,40 @@ class XmlDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the json data storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param encoding_name: The code page name of the preferred encoding. If not specified, the + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar location: The location of the json data storage. + :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation + :ivar encoding_name: The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :type encoding_name: any - :param null_value: The null value string. Type: string (or Expression with resultType string). - :type null_value: any - :param compression: The data compression method used for the json dataset. 
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + :vartype encoding_name: any + :ivar null_value: The null value string. Type: string (or Expression with resultType string). + :vartype null_value: any + :ivar compression: The data compression method used for the json dataset. + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -38039,6 +52739,41 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword location: The location of the json data storage. + :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation + :keyword encoding_name: The code page name of the preferred encoding. If not specified, the + default value is UTF-8, unless BOM denotes another Unicode encoding. 
Refer to the name column + of the table in the following link to set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). + :paramtype encoding_name: any + :keyword null_value: The null value string. Type: string (or Expression with resultType + string). + :paramtype null_value: any + :keyword compression: The data compression method used for the json dataset. + :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ super(XmlDataset, self).__init__(**kwargs) self.type = 'Xml' # type: str self.location = kwargs.get('location', None) @@ -38052,27 +52787,27 @@ class XmlReadSettings(FormatReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param compression_properties: Compression settings. - :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings - :param validation_mode: Indicates what validation method is used when reading the xml files. + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar compression_properties: Compression settings. + :vartype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + :ivar validation_mode: Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). - :type validation_mode: any - :param detect_data_type: Indicates whether type detection is enabled when reading the xml - files. 
Type: boolean (or Expression with resultType boolean). - :type detect_data_type: any - :param namespaces: Indicates whether namespace is enabled when reading the xml files. Type: + :vartype validation_mode: any + :ivar detect_data_type: Indicates whether type detection is enabled when reading the xml files. + Type: boolean (or Expression with resultType boolean). + :vartype detect_data_type: any + :ivar namespaces: Indicates whether namespace is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). - :type namespaces: any - :param namespace_prefixes: Namespace uri to prefix mappings to override the prefixes in column + :vartype namespaces: any + :ivar namespace_prefixes: Namespace uri to prefix mappings to override the prefixes in column names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name in the xml data file will be used. Example: "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object). - :type namespace_prefixes: any + :vartype namespace_prefixes: any """ _validation = { @@ -38093,6 +52828,27 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword compression_properties: Compression settings. + :paramtype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + :keyword validation_mode: Indicates what validation method is used when reading the xml files. + Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). + :paramtype validation_mode: any + :keyword detect_data_type: Indicates whether type detection is enabled when reading the xml + files. Type: boolean (or Expression with resultType boolean). 
+ :paramtype detect_data_type: any + :keyword namespaces: Indicates whether namespace is enabled when reading the xml files. Type: + boolean (or Expression with resultType boolean). + :paramtype namespaces: any + :keyword namespace_prefixes: Namespace uri to prefix mappings to override the prefixes in + column names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix + of xml element/attribute name in the xml data file will be used. Example: + "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object). + :paramtype namespace_prefixes: any + """ super(XmlReadSettings, self).__init__(**kwargs) self.type = 'XmlReadSettings' # type: str self.compression_properties = kwargs.get('compression_properties', None) @@ -38107,30 +52863,30 @@ class XmlSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Xml store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: Xml format settings. - :type format_settings: ~azure.mgmt.datafactory.models.XmlReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar store_settings: Xml store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar format_settings: Xml format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.XmlReadSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -38153,6 +52909,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: Xml store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword format_settings: Xml format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.XmlReadSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(XmlSource, self).__init__(**kwargs) self.type = 'XmlSource' # type: str self.store_settings = kwargs.get('store_settings', None) @@ -38165,14 +52945,14 @@ class ZipDeflateReadSettings(CompressionReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The Compression setting type.Constant filled by server. - :type type: str - :param preserve_zip_file_name_as_folder: Preserve the zip file name as folder path. Type: + :vartype additional_properties: dict[str, any] + :ivar type: Required. The Compression setting type.Constant filled by server. 
+ :vartype type: str + :ivar preserve_zip_file_name_as_folder: Preserve the zip file name as folder path. Type: boolean (or Expression with resultType boolean). - :type preserve_zip_file_name_as_folder: any + :vartype preserve_zip_file_name_as_folder: any """ _validation = { @@ -38189,6 +52969,14 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword preserve_zip_file_name_as_folder: Preserve the zip file name as folder path. Type: + boolean (or Expression with resultType boolean). + :paramtype preserve_zip_file_name_as_folder: any + """ super(ZipDeflateReadSettings, self).__init__(**kwargs) self.type = 'ZipDeflateReadSettings' # type: str self.preserve_zip_file_name_as_folder = kwargs.get('preserve_zip_file_name_as_folder', None) @@ -38199,40 +52987,40 @@ class ZohoLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_properties: Properties used to connect to Zoho. 
It is mutually exclusive with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_properties: Properties used to connect to Zoho. It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: any - :param endpoint: The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). - :type endpoint: any - :param access_token: The access token for Zoho authentication. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype connection_properties: any + :ivar endpoint: The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). + :vartype endpoint: any + :ivar access_token: The access token for Zoho authentication. + :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
- :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -38259,6 +53047,40 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_properties: Properties used to connect to Zoho. It is mutually exclusive + with any other properties in the linked service. Type: object. + :paramtype connection_properties: any + :keyword endpoint: The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). + :paramtype endpoint: any + :keyword access_token: The access token for Zoho authentication. 
+ :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(ZohoLinkedService, self).__init__(**kwargs) self.type = 'Zoho' # type: str self.connection_properties = kwargs.get('connection_properties', None) @@ -38275,30 +53097,30 @@ class ZohoObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -38323,6 +53145,30 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(ZohoObjectDataset, self).__init__(**kwargs) self.type = 'ZohoObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -38333,32 +53179,32 @@ class ZohoSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. 
Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. 
Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -38381,6 +53227,32 @@ def __init__( self, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). 
+ :paramtype query: any + """ super(ZohoSource, self).__init__(**kwargs) self.type = 'ZohoSource' # type: str self.query = kwargs.get('query', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index cee8a8323b7b..62dbdcd64b7a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -17,12 +17,12 @@ class AccessPolicyResponse(msrest.serialization.Model): """Get Data Plane read only token response definition. - :param policy: The user access policy. - :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy - :param access_token: Data Plane read only access token. - :type access_token: str - :param data_plane_url: Data Plane service base URL. - :type data_plane_url: str + :ivar policy: The user access policy. + :vartype policy: ~azure.mgmt.datafactory.models.UserAccessPolicy + :ivar access_token: Data Plane read only access token. + :vartype access_token: str + :ivar data_plane_url: Data Plane service base URL. + :vartype data_plane_url: str """ _attribute_map = { @@ -39,6 +39,14 @@ def __init__( data_plane_url: Optional[str] = None, **kwargs ): + """ + :keyword policy: The user access policy. + :paramtype policy: ~azure.mgmt.datafactory.models.UserAccessPolicy + :keyword access_token: Data Plane read only access token. + :paramtype access_token: str + :keyword data_plane_url: Data Plane service base URL. + :paramtype data_plane_url: str + """ super(AccessPolicyResponse, self).__init__(**kwargs) self.policy = policy self.access_token = access_token @@ -53,19 +61,19 @@ class Activity(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] """ _validation = { @@ -96,6 +104,19 @@ def __init__( user_properties: Optional[List["UserProperty"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. 
+ :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + """ super(Activity, self).__init__(**kwargs) self.additional_properties = additional_properties self.name = name @@ -110,13 +131,13 @@ class ActivityDependency(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param activity: Required. Activity name. - :type activity: str - :param dependency_conditions: Required. Match-Condition for the dependency. - :type dependency_conditions: list[str or ~azure.mgmt.datafactory.models.DependencyCondition] + :vartype additional_properties: dict[str, any] + :ivar activity: Required. Activity name. + :vartype activity: str + :ivar dependency_conditions: Required. Match-Condition for the dependency. + :vartype dependency_conditions: list[str or ~azure.mgmt.datafactory.models.DependencyCondition] """ _validation = { @@ -138,6 +159,16 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword activity: Required. Activity name. + :paramtype activity: str + :keyword dependency_conditions: Required. Match-Condition for the dependency. + :paramtype dependency_conditions: list[str or + ~azure.mgmt.datafactory.models.DependencyCondition] + """ super(ActivityDependency, self).__init__(**kwargs) self.additional_properties = additional_properties self.activity = activity @@ -147,25 +178,25 @@ def __init__( class ActivityPolicy(msrest.serialization.Model): """Execution policy for an activity. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param timeout: Specifies the timeout for the activity to run. The default timeout is 7 days. + :vartype additional_properties: dict[str, any] + :ivar timeout: Specifies the timeout for the activity to run. The default timeout is 7 days. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: any - :param retry: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with + :vartype timeout: any + :ivar retry: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. - :type retry: any - :param retry_interval_in_seconds: Interval between each retry attempt (in seconds). The default + :vartype retry: any + :ivar retry_interval_in_seconds: Interval between each retry attempt (in seconds). The default is 30 sec. - :type retry_interval_in_seconds: int - :param secure_input: When set to true, Input from activity is considered as secure and will not + :vartype retry_interval_in_seconds: int + :ivar secure_input: When set to true, Input from activity is considered as secure and will not be logged to monitoring. - :type secure_input: bool - :param secure_output: When set to true, Output from activity is considered as secure and will + :vartype secure_input: bool + :ivar secure_output: When set to true, Output from activity is considered as secure and will not be logged to monitoring. - :type secure_output: bool + :vartype secure_output: bool """ _validation = { @@ -192,6 +223,27 @@ def __init__( secure_output: Optional[bool] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword timeout: Specifies the timeout for the activity to run. The default timeout is 7 days. + Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype timeout: any + :keyword retry: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression + with resultType integer), minimum: 0. + :paramtype retry: any + :keyword retry_interval_in_seconds: Interval between each retry attempt (in seconds). The + default is 30 sec. + :paramtype retry_interval_in_seconds: int + :keyword secure_input: When set to true, Input from activity is considered as secure and will + not be logged to monitoring. + :paramtype secure_input: bool + :keyword secure_output: When set to true, Output from activity is considered as secure and will + not be logged to monitoring. + :paramtype secure_output: bool + """ super(ActivityPolicy, self).__init__(**kwargs) self.additional_properties = additional_properties self.timeout = timeout @@ -206,9 +258,9 @@ class ActivityRun(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar pipeline_name: The name of the pipeline. :vartype pipeline_name: str :ivar pipeline_run_id: The id of the pipeline run. @@ -276,6 +328,11 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + """ super(ActivityRun, self).__init__(**kwargs) self.additional_properties = additional_properties self.pipeline_name = None @@ -298,11 +355,11 @@ class ActivityRunsQueryResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of activity runs. - :type value: list[~azure.mgmt.datafactory.models.ActivityRun] - :param continuation_token: The continuation token for getting the next page of results, if any + :ivar value: Required. List of activity runs. + :vartype value: list[~azure.mgmt.datafactory.models.ActivityRun] + :ivar continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. - :type continuation_token: str + :vartype continuation_token: str """ _validation = { @@ -321,6 +378,13 @@ def __init__( continuation_token: Optional[str] = None, **kwargs ): + """ + :keyword value: Required. List of activity runs. + :paramtype value: list[~azure.mgmt.datafactory.models.ActivityRun] + :keyword continuation_token: The continuation token for getting the next page of results, if + any remaining results exist, null otherwise. + :paramtype continuation_token: str + """ super(ActivityRunsQueryResponse, self).__init__(**kwargs) self.value = value self.continuation_token = continuation_token @@ -329,8 +393,8 @@ def __init__( class AddDataFlowToDebugSessionResponse(msrest.serialization.Model): """Response body structure for starting data flow debug session. - :param job_version: The ID of data flow debug job version. - :type job_version: str + :ivar job_version: The ID of data flow debug job version. + :vartype job_version: str """ _attribute_map = { @@ -343,6 +407,10 @@ def __init__( job_version: Optional[str] = None, **kwargs ): + """ + :keyword job_version: The ID of data flow debug job version. 
+ :paramtype job_version: str + """ super(AddDataFlowToDebugSessionResponse, self).__init__(**kwargs) self.job_version = job_version @@ -350,10 +418,10 @@ def __init__( class AdditionalColumns(msrest.serialization.Model): """Specify the column name and value of additional columns. - :param name: Additional column name. Type: string (or Expression with resultType string). - :type name: any - :param value: Additional column value. Type: string (or Expression with resultType string). - :type value: any + :ivar name: Additional column name. Type: string (or Expression with resultType string). + :vartype name: any + :ivar value: Additional column value. Type: string (or Expression with resultType string). + :vartype value: any """ _attribute_map = { @@ -368,6 +436,12 @@ def __init__( value: Optional[Any] = None, **kwargs ): + """ + :keyword name: Additional column name. Type: string (or Expression with resultType string). + :paramtype name: any + :keyword value: Additional column value. Type: string (or Expression with resultType string). + :paramtype value: any + """ super(AdditionalColumns, self).__init__(**kwargs) self.name = name self.value = value @@ -381,19 +455,19 @@ class LinkedService(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] """ _validation = { @@ -423,6 +497,19 @@ def __init__( annotations: Optional[List[Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + """ super(LinkedService, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'LinkedService' # type: str @@ -437,48 +524,47 @@ class AmazonMWSLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. - mws.amazonservices.com). - :type endpoint: any - :param marketplace_id: Required. The Amazon Marketplace ID you want to retrieve data from. To + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar endpoint: Required. The endpoint of the Amazon MWS server, (i.e. mws.amazonservices.com). + :vartype endpoint: any + :ivar marketplace_id: Required. The Amazon Marketplace ID you want to retrieve data from. To retrieve data from multiple Marketplace IDs, separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2). - :type marketplace_id: any - :param seller_id: Required. The Amazon seller ID. 
- :type seller_id: any - :param mws_auth_token: The Amazon MWS authentication token. - :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_key_id: Required. The access key id used to access data. - :type access_key_id: any - :param secret_key: The secret key used to access data. - :type secret_key: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype marketplace_id: any + :ivar seller_id: Required. The Amazon seller ID. + :vartype seller_id: any + :ivar mws_auth_token: The Amazon MWS authentication token. + :vartype mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar access_key_id: Required. The access key id used to access data. + :vartype access_key_id: any + :ivar secret_key: The secret key used to access data. + :vartype secret_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -528,6 +614,48 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword endpoint: Required. The endpoint of the Amazon MWS server, (i.e. + mws.amazonservices.com). + :paramtype endpoint: any + :keyword marketplace_id: Required. The Amazon Marketplace ID you want to retrieve data from. To + retrieve data from multiple Marketplace IDs, separate them with a comma (,). (i.e. + A2EUQ1WTGCTBG2). + :paramtype marketplace_id: any + :keyword seller_id: Required. The Amazon seller ID. + :paramtype seller_id: any + :keyword mws_auth_token: The Amazon MWS authentication token. + :paramtype mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword access_key_id: Required. The access key id used to access data. + :paramtype access_key_id: any + :keyword secret_key: The secret key used to access data. + :paramtype secret_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. 
+ :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AmazonMWS' # type: str self.endpoint = endpoint @@ -550,28 +678,28 @@ class Dataset(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder """ _validation = { @@ -608,6 +736,28 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. 
+ :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + """ super(Dataset, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'Dataset' # type: str @@ -625,30 +775,30 @@ class AmazonMWSObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -683,6 +833,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AmazonMWSObject' # type: str self.table_name = table_name @@ -696,23 +870,23 @@ class CopySource(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. 
Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any + :vartype disable_metrics_collection: any """ _validation = { @@ -742,6 +916,23 @@ def __init__( disable_metrics_collection: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + """ super(CopySource, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'CopySource' # type: str @@ -759,29 +950,29 @@ class TabularSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -815,6 +1006,29 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'TabularSource' # type: str self.query_timeout = query_timeout @@ -826,32 +1040,32 @@ class AmazonMWSSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. 
Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: any + :vartype query: any """ _validation = { @@ -883,6 +1097,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). 
+ :paramtype query: any + """ super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AmazonMWSSource' # type: str self.query = query @@ -893,28 +1133,28 @@ class AmazonRdsForOracleLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -947,6 +1187,28 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. 
+ :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AmazonRdsForOracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AmazonRdsForOracle' # type: str self.connection_string = connection_string @@ -957,19 +1219,19 @@ def __init__( class AmazonRdsForOraclePartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for AmazonRdsForOracle source partitioning. - :param partition_names: Names of the physical partitions of AmazonRdsForOracle table. - :type partition_names: any - :param partition_column_name: The name of the column in integer type that will be used for + :ivar partition_names: Names of the physical partitions of AmazonRdsForOracle table. + :vartype partition_names: any + :ivar partition_column_name: The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: any - :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + :vartype partition_column_name: any + :ivar partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). 
- :type partition_upper_bound: any - :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + :vartype partition_upper_bound: any + :ivar partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_lower_bound: any + :vartype partition_lower_bound: any """ _attribute_map = { @@ -988,6 +1250,21 @@ def __init__( partition_lower_bound: Optional[Any] = None, **kwargs ): + """ + :keyword partition_names: Names of the physical partitions of AmazonRdsForOracle table. + :paramtype partition_names: any + :keyword partition_column_name: The name of the column in integer type that will be used for + proceeding range partitioning. Type: string (or Expression with resultType string). + :paramtype partition_column_name: any + :keyword partition_upper_bound: The maximum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). + :paramtype partition_upper_bound: any + :keyword partition_lower_bound: The minimum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). + :paramtype partition_lower_bound: any + """ super(AmazonRdsForOraclePartitionSettings, self).__init__(**kwargs) self.partition_names = partition_names self.partition_column_name = partition_column_name @@ -1000,38 +1277,38 @@ class AmazonRdsForOracleSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. 
Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param oracle_reader_query: AmazonRdsForOracle reader query. Type: string (or Expression with + :vartype disable_metrics_collection: any + :ivar oracle_reader_query: AmazonRdsForOracle reader query. Type: string (or Expression with resultType string). - :type oracle_reader_query: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype oracle_reader_query: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param partition_option: The partition mechanism that will be used for AmazonRdsForOracle read + :vartype query_timeout: any + :ivar partition_option: The partition mechanism that will be used for AmazonRdsForOracle read in parallel. Type: string (or Expression with resultType string). - :type partition_option: any - :param partition_settings: The settings that will be leveraged for AmazonRdsForOracle source + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for AmazonRdsForOracle source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.AmazonRdsForOraclePartitionSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype partition_settings: ~azure.mgmt.datafactory.models.AmazonRdsForOraclePartitionSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -1067,6 +1344,39 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword oracle_reader_query: AmazonRdsForOracle reader query. Type: string (or Expression with + resultType string). + :paramtype oracle_reader_query: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword partition_option: The partition mechanism that will be used for AmazonRdsForOracle + read in parallel. Type: string (or Expression with resultType string). + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for AmazonRdsForOracle source + partitioning. + :paramtype partition_settings: + ~azure.mgmt.datafactory.models.AmazonRdsForOraclePartitionSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(AmazonRdsForOracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AmazonRdsForOracleSource' # type: str self.oracle_reader_query = oracle_reader_query @@ -1081,34 +1391,34 @@ class AmazonRdsForOracleTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param schema_type_properties_schema: The schema name of the AmazonRdsForOracle database. Type: + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar schema_type_properties_schema: The schema name of the AmazonRdsForOracle database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The table name of the AmazonRdsForOracle database. Type: string (or Expression + :vartype schema_type_properties_schema: any + :ivar table: The table name of the AmazonRdsForOracle database. Type: string (or Expression with resultType string). - :type table: any + :vartype table: any """ _validation = { @@ -1145,6 +1455,34 @@ def __init__( table: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword schema_type_properties_schema: The schema name of the AmazonRdsForOracle database. + Type: string (or Expression with resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The table name of the AmazonRdsForOracle database. Type: string (or Expression + with resultType string). + :paramtype table: any + """ super(AmazonRdsForOracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AmazonRdsForOracleTable' # type: str self.schema_type_properties_schema = schema_type_properties_schema @@ -1156,33 +1494,33 @@ class AmazonRdsForSqlServerLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. 
+ :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param user_name: The on-premises Windows authentication user name. Type: string (or Expression + :vartype connection_string: any + :ivar user_name: The on-premises Windows authentication user name. Type: string (or Expression with resultType string). - :type user_name: any - :param password: The on-premises Windows authentication password. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype user_name: any + :ivar password: The on-premises Windows authentication password. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param always_encrypted_settings: Sql always encrypted properties. - :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :vartype encrypted_credential: any + :ivar always_encrypted_settings: Sql always encrypted properties. 
+ :vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -1219,6 +1557,34 @@ def __init__( always_encrypted_settings: Optional["SqlAlwaysEncryptedProperties"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword user_name: The on-premises Windows authentication user name. Type: string (or + Expression with resultType string). + :paramtype user_name: any + :keyword password: The on-premises Windows authentication password. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword always_encrypted_settings: Sql always encrypted properties. 
+ :paramtype always_encrypted_settings: + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + """ super(AmazonRdsForSqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AmazonRdsForSqlServer' # type: str self.connection_string = connection_string @@ -1233,46 +1599,46 @@ class AmazonRdsForSqlServerSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: any - :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database - source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression - with resultType string). - :type sql_reader_stored_procedure_name: any - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + :vartype additional_columns: any + :ivar sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :vartype sql_reader_query: any + :ivar sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. + This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with + resultType string). + :vartype sql_reader_stored_procedure_name: any + :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
- :type stored_procedure_parameters: dict[str, + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: any - :param partition_option: The partition mechanism that will be used for Sql read in parallel. + :ivar produce_additional_types: Which additional types to produce. + :vartype produce_additional_types: any + :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. + :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -1314,6 +1680,47 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. 
Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword sql_reader_query: SQL reader query. Type: string (or Expression with resultType + string). + :paramtype sql_reader_query: any + :keyword sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :paramtype sql_reader_stored_procedure_name: any + :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :keyword produce_additional_types: Which additional types to produce. + :paramtype produce_additional_types: any + :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. 
+ :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + """ super(AmazonRdsForSqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AmazonRdsForSqlServerSource' # type: str self.sql_reader_query = sql_reader_query @@ -1329,34 +1736,34 @@ class AmazonRdsForSqlServerTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The table name of the SQL Server dataset. Type: string (or Expression with + :vartype schema_type_properties_schema: any + :ivar table: The table name of the SQL Server dataset. Type: string (or Expression with resultType string). - :type table: any + :vartype table: any """ _validation = { @@ -1393,6 +1800,34 @@ def __init__( table: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. 
+ :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string + (or Expression with resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The table name of the SQL Server dataset. Type: string (or Expression with + resultType string). + :paramtype table: any + """ super(AmazonRdsForSqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AmazonRdsForSqlServerTable' # type: str self.schema_type_properties_schema = schema_type_properties_schema @@ -1404,37 +1839,37 @@ class AmazonRedshiftLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param server: Required. The name of the Amazon Redshift server. Type: string (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar server: Required. The name of the Amazon Redshift server. Type: string (or Expression with resultType string). - :type server: any - :param username: The username of the Amazon Redshift source. Type: string (or Expression with + :vartype server: any + :ivar username: The username of the Amazon Redshift source. Type: string (or Expression with resultType string). - :type username: any - :param password: The password of the Amazon Redshift source. 
- :type password: ~azure.mgmt.datafactory.models.SecretBase - :param database: Required. The database name of the Amazon Redshift source. Type: string (or + :vartype username: any + :ivar password: The password of the Amazon Redshift source. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar database: Required. The database name of the Amazon Redshift source. Type: string (or Expression with resultType string). - :type database: any - :param port: The TCP port number that the Amazon Redshift server uses to listen for client + :vartype database: any + :ivar port: The TCP port number that the Amazon Redshift server uses to listen for client connections. The default value is 5439. Type: integer (or Expression with resultType integer). - :type port: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype port: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -1474,6 +1909,37 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword server: Required. 
The name of the Amazon Redshift server. Type: string (or Expression + with resultType string). + :paramtype server: any + :keyword username: The username of the Amazon Redshift source. Type: string (or Expression with + resultType string). + :paramtype username: any + :keyword password: The password of the Amazon Redshift source. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword database: Required. The database name of the Amazon Redshift source. Type: string (or + Expression with resultType string). + :paramtype database: any + :keyword port: The TCP port number that the Amazon Redshift server uses to listen for client + connections. The default value is 5439. Type: integer (or Expression with resultType integer). + :paramtype port: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AmazonRedshift' # type: str self.server = server @@ -1489,35 +1955,35 @@ class AmazonRedshiftSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. 
+ :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Type: string (or Expression with resultType string). 
- :type query: any - :param redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when + :vartype additional_columns: any + :ivar query: Database query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. - :type redshift_unload_settings: ~azure.mgmt.datafactory.models.RedshiftUnloadSettings + :vartype redshift_unload_settings: ~azure.mgmt.datafactory.models.RedshiftUnloadSettings """ _validation = { @@ -1551,6 +2017,35 @@ def __init__( redshift_unload_settings: Optional["RedshiftUnloadSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when + copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be + unloaded into S3 first and then copied into the targeted sink from the interim S3. + :paramtype redshift_unload_settings: ~azure.mgmt.datafactory.models.RedshiftUnloadSettings + """ super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AmazonRedshiftSource' # type: str self.query = query @@ -1562,37 +2057,37 @@ class AmazonRedshiftTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. 
+ :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The Amazon Redshift table name. 
Type: string (or Expression with resultType + :vartype table_name: any + :ivar table: The Amazon Redshift table name. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The Amazon Redshift schema name. Type: string (or + :vartype table: any + :ivar schema_type_properties_schema: The Amazon Redshift schema name. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -1631,6 +2126,37 @@ def __init__( schema_type_properties_schema: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. 
+ :paramtype table_name: any + :keyword table: The Amazon Redshift table name. Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The Amazon Redshift schema name. Type: string (or + Expression with resultType string). + :paramtype schema_type_properties_schema: any + """ super(AmazonRedshiftTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AmazonRedshiftTable' # type: str self.table_name = table_name @@ -1643,37 +2169,37 @@ class AmazonS3CompatibleLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param access_key_id: The access key identifier of the Amazon S3 Compatible Identity and Access + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. 
+ :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar access_key_id: The access key identifier of the Amazon S3 Compatible Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: any - :param secret_access_key: The secret access key of the Amazon S3 Compatible Identity and Access + :vartype access_key_id: any + :ivar secret_access_key: The secret access key of the Amazon S3 Compatible Identity and Access Management (IAM) user. - :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_url: This value specifies the endpoint to access with the Amazon S3 Compatible + :vartype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_url: This value specifies the endpoint to access with the Amazon S3 Compatible Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :type service_url: any - :param force_path_style: If true, use S3 path-style access instead of virtual hosted-style + :vartype service_url: any + :ivar force_path_style: If true, use S3 path-style access instead of virtual hosted-style access. Default value is false. Type: boolean (or Expression with resultType boolean). - :type force_path_style: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype force_path_style: any + :ivar encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -1709,6 +2235,37 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword access_key_id: The access key identifier of the Amazon S3 Compatible Identity and + Access Management (IAM) user. Type: string (or Expression with resultType string). + :paramtype access_key_id: any + :keyword secret_access_key: The secret access key of the Amazon S3 Compatible Identity and + Access Management (IAM) user. + :paramtype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_url: This value specifies the endpoint to access with the Amazon S3 Compatible + Connector. This is an optional property; change it only if you want to try a different service + endpoint or want to switch between https and http. Type: string (or Expression with resultType + string). + :paramtype service_url: any + :keyword force_path_style: If true, use S3 path-style access instead of virtual hosted-style + access. Default value is false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype force_path_style: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AmazonS3CompatibleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AmazonS3Compatible' # type: str self.access_key_id = access_key_id @@ -1726,17 +2283,17 @@ class DatasetLocation(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any + :vartype file_name: any """ _validation = { @@ -1762,6 +2319,17 @@ def __init__( file_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + """ super(DatasetLocation, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'DatasetLocation' # type: str @@ -1774,23 +2342,23 @@ class AmazonS3CompatibleLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any - :param bucket_name: Specify the bucketName of Amazon S3 Compatible. Type: string (or Expression + :vartype file_name: any + :ivar bucket_name: Specify the bucketName of Amazon S3 Compatible. Type: string (or Expression with resultType string). - :type bucket_name: any - :param version: Specify the version of Amazon S3 Compatible. 
Type: string (or Expression with + :vartype bucket_name: any + :ivar version: Specify the version of Amazon S3 Compatible. Type: string (or Expression with resultType string). - :type version: any + :vartype version: any """ _validation = { @@ -1816,6 +2384,23 @@ def __init__( version: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + :keyword bucket_name: Specify the bucketName of Amazon S3 Compatible. Type: string (or + Expression with resultType string). + :paramtype bucket_name: any + :keyword version: Specify the version of Amazon S3 Compatible. Type: string (or Expression with + resultType string). + :paramtype version: any + """ super(AmazonS3CompatibleLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.type = 'AmazonS3CompatibleLocation' # type: str self.bucket_name = bucket_name @@ -1830,17 +2415,17 @@ class StoreReadSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. 
The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any + :vartype disable_metrics_collection: any """ _validation = { @@ -1866,6 +2451,17 @@ def __init__( disable_metrics_collection: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + """ super(StoreReadSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'StoreReadSettings' # type: str @@ -1878,47 +2474,47 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. 
- :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: Amazon S3 Compatible wildcardFolderPath. Type: string (or + :vartype recursive: any + :ivar wildcard_folder_path: Amazon S3 Compatible wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: Amazon S3 Compatible wildcardFileName. Type: string (or Expression + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: Amazon S3 Compatible wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param prefix: The prefix filter for the S3 Compatible object name. Type: string (or Expression + :vartype wildcard_file_name: any + :ivar prefix: The prefix filter for the S3 Compatible object name. Type: string (or Expression with resultType string). 
- :type prefix: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype prefix: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype file_list_path: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :type modified_datetime_end: any + :vartype modified_datetime_end: any """ _validation = { @@ -1960,6 +2556,47 @@ def __init__( modified_datetime_end: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: Amazon S3 Compatible wildcardFolderPath. Type: string (or + Expression with resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: Amazon S3 Compatible wildcardFileName. Type: string (or Expression + with resultType string). + :paramtype wildcard_file_name: any + :keyword prefix: The prefix filter for the S3 Compatible object name. Type: string (or + Expression with resultType string). + :paramtype prefix: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). 
+ :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + """ super(AmazonS3CompatibleReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AmazonS3CompatibleReadSettings' # type: str self.recursive = recursive @@ -1979,50 +2616,50 @@ class AmazonS3Dataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param bucket_name: Required. The name of the Amazon S3 bucket. Type: string (or Expression - with resultType string). - :type bucket_name: any - :param key: The key of the Amazon S3 object. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar bucket_name: Required. The name of the Amazon S3 bucket. Type: string (or Expression with + resultType string). + :vartype bucket_name: any + :ivar key: The key of the Amazon S3 object. Type: string (or Expression with resultType string). 
- :type key: any - :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with + :vartype key: any + :ivar prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). - :type prefix: any - :param version: The version for the S3 object. Type: string (or Expression with resultType + :vartype prefix: any + :ivar version: The version for the S3 object. Type: string (or Expression with resultType string). - :type version: any - :param modified_datetime_start: The start of S3 object's modified datetime. Type: string (or + :vartype version: any + :ivar modified_datetime_start: The start of S3 object's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of S3 object's modified datetime. Type: string (or + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of S3 object's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any - :param format: The format of files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the Amazon S3 object. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + :vartype modified_datetime_end: any + :ivar format: The format of files. + :vartype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :ivar compression: The data compression method used for the Amazon S3 object. + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -2072,6 +2709,50 @@ def __init__( compression: Optional["DatasetCompression"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. 
+ :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword bucket_name: Required. The name of the Amazon S3 bucket. Type: string (or Expression + with resultType string). + :paramtype bucket_name: any + :keyword key: The key of the Amazon S3 object. Type: string (or Expression with resultType + string). + :paramtype key: any + :keyword prefix: The prefix filter for the S3 object name. Type: string (or Expression with + resultType string). + :paramtype prefix: any + :keyword version: The version for the S3 object. Type: string (or Expression with resultType + string). + :paramtype version: any + :keyword modified_datetime_start: The start of S3 object's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of S3 object's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + :keyword format: The format of files. 
+ :paramtype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :keyword compression: The data compression method used for the Amazon S3 object. + :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AmazonS3Object' # type: str self.bucket_name = bucket_name @@ -2089,38 +2770,38 @@ class AmazonS3LinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param authentication_type: The authentication type of S3. Allowed value: AccessKey (default) - or TemporarySecurityCredentials. Type: string (or Expression with resultType string). - :type authentication_type: any - :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. 
+ :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar authentication_type: The authentication type of S3. Allowed value: AccessKey (default) or + TemporarySecurityCredentials. Type: string (or Expression with resultType string). + :vartype authentication_type: any + :ivar access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: any - :param secret_access_key: The secret access key of the Amazon S3 Identity and Access Management + :vartype access_key_id: any + :ivar secret_access_key: The secret access key of the Amazon S3 Identity and Access Management (IAM) user. - :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_url: This value specifies the endpoint to access with the S3 Connector. This is + :vartype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_url: This value specifies the endpoint to access with the S3 Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :type service_url: any - :param session_token: The session token for the S3 temporary security credential. - :type session_token: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are + :vartype service_url: any + :ivar session_token: The session token for the S3 temporary security credential. + :vartype session_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -2158,6 +2839,38 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword authentication_type: The authentication type of S3. Allowed value: AccessKey (default) + or TemporarySecurityCredentials. Type: string (or Expression with resultType string). + :paramtype authentication_type: any + :keyword access_key_id: The access key identifier of the Amazon S3 Identity and Access + Management (IAM) user. Type: string (or Expression with resultType string). + :paramtype access_key_id: any + :keyword secret_access_key: The secret access key of the Amazon S3 Identity and Access + Management (IAM) user. + :paramtype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_url: This value specifies the endpoint to access with the S3 Connector. 
This + is an optional property; change it only if you want to try a different service endpoint or want + to switch between https and http. Type: string (or Expression with resultType string). + :paramtype service_url: any + :keyword session_token: The session token for the S3 temporary security credential. + :paramtype session_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AmazonS3' # type: str self.authentication_type = authentication_type @@ -2173,23 +2886,23 @@ class AmazonS3Location(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. 
Type: string (or Expression with resultType string). - :type file_name: any - :param bucket_name: Specify the bucketName of amazon S3. Type: string (or Expression with + :vartype file_name: any + :ivar bucket_name: Specify the bucketName of amazon S3. Type: string (or Expression with resultType string). - :type bucket_name: any - :param version: Specify the version of amazon S3. Type: string (or Expression with resultType + :vartype bucket_name: any + :ivar version: Specify the version of amazon S3. Type: string (or Expression with resultType string). - :type version: any + :vartype version: any """ _validation = { @@ -2215,6 +2928,23 @@ def __init__( version: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + :keyword bucket_name: Specify the bucketName of amazon S3. Type: string (or Expression with + resultType string). + :paramtype bucket_name: any + :keyword version: Specify the version of amazon S3. Type: string (or Expression with resultType + string). + :paramtype version: any + """ super(AmazonS3Location, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.type = 'AmazonS3Location' # type: str self.bucket_name = bucket_name @@ -2226,47 +2956,47 @@ class AmazonS3ReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or Expression with + :vartype recursive: any + :ivar wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or Expression with + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with + :vartype wildcard_file_name: any + :ivar prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). 
- :type prefix: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype prefix: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype file_list_path: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :type modified_datetime_end: any + :vartype modified_datetime_end: any """ _validation = { @@ -2308,6 +3038,47 @@ def __init__( modified_datetime_end: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or Expression with + resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or Expression with + resultType string). + :paramtype wildcard_file_name: any + :keyword prefix: The prefix filter for the S3 object name. Type: string (or Expression with + resultType string). + :paramtype prefix: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). 
+ :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + """ super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AmazonS3ReadSettings' # type: str self.recursive = recursive @@ -2330,19 +3101,19 @@ class ControlActivity(Activity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. 
+ :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] """ _validation = { @@ -2373,6 +3144,19 @@ def __init__( user_properties: Optional[List["UserProperty"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + """ super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.type = 'Container' # type: str @@ -2382,23 +3166,23 @@ class AppendVariableActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. 
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param variable_name: Name of the variable whose value needs to be appended to. - :type variable_name: str - :param value: Value to be appended. Could be a static value or Expression. - :type value: any + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar variable_name: Name of the variable whose value needs to be appended to. + :vartype variable_name: str + :ivar value: Value to be appended. Could be a static value or Expression. + :vartype value: any """ _validation = { @@ -2429,6 +3213,23 @@ def __init__( value: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword variable_name: Name of the variable whose value needs to be appended to. + :paramtype variable_name: str + :keyword value: Value to be appended. Could be a static value or Expression. 
+ :paramtype value: any + """ super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.type = 'AppendVariable' # type: str self.variable_name = variable_name @@ -2456,6 +3257,8 @@ def __init__( self, **kwargs ): + """ + """ super(ArmIdWrapper, self).__init__(**kwargs) self.id = None @@ -2465,35 +3268,35 @@ class AvroDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the avro storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param avro_compression_codec: The data avroCompressionCodec. Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar location: The location of the avro storage. + :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation + :ivar avro_compression_codec: The data avroCompressionCodec. Type: string (or Expression with resultType string). - :type avro_compression_codec: any - :param avro_compression_level: - :type avro_compression_level: int + :vartype avro_compression_codec: any + :ivar avro_compression_level: + :vartype avro_compression_level: int """ _validation = { @@ -2533,6 +3336,35 @@ def __init__( avro_compression_level: Optional[int] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. 
+ :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword location: The location of the avro storage. + :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation + :keyword avro_compression_codec: The data avroCompressionCodec. Type: string (or Expression + with resultType string). + :paramtype avro_compression_codec: any + :keyword avro_compression_level: + :paramtype avro_compression_level: int + """ super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'Avro' # type: str self.location = location @@ -2548,15 +3380,15 @@ class DatasetStorageFormat(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage format.Constant filled by server. - :type type: str - :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: any - :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: any + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage format.Constant filled by server. + :vartype type: str + :ivar serializer: Serializer. Type: string (or Expression with resultType string). + :vartype serializer: any + :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). + :vartype deserializer: any """ _validation = { @@ -2582,6 +3414,15 @@ def __init__( deserializer: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword serializer: Serializer. Type: string (or Expression with resultType string). + :paramtype serializer: any + :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). + :paramtype deserializer: any + """ super(DatasetStorageFormat, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'DatasetStorageFormat' # type: str @@ -2594,15 +3435,15 @@ class AvroFormat(DatasetStorageFormat): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage format.Constant filled by server. - :type type: str - :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: any - :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: any + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage format.Constant filled by server. + :vartype type: str + :ivar serializer: Serializer. Type: string (or Expression with resultType string). + :vartype serializer: any + :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). + :vartype deserializer: any """ _validation = { @@ -2624,6 +3465,15 @@ def __init__( deserializer: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword serializer: Serializer. Type: string (or Expression with resultType string). + :paramtype serializer: any + :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). + :paramtype deserializer: any + """ super(AvroFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) self.type = 'AvroFormat' # type: str @@ -2636,29 +3486,29 @@ class CopySink(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. 
Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any + :vartype disable_metrics_collection: any """ _validation = { @@ -2692,6 +3542,29 @@ def __init__( disable_metrics_collection: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + """ super(CopySink, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'CopySink' # type: str @@ -2708,33 +3581,33 @@ class AvroSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Avro store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: Avro format settings. - :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings + :vartype disable_metrics_collection: any + :ivar store_settings: Avro store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :ivar format_settings: Avro format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings """ _validation = { @@ -2768,6 +3641,33 @@ def __init__( format_settings: Optional["AvroWriteSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype disable_metrics_collection: any + :keyword store_settings: Avro store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :keyword format_settings: Avro format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings + """ super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AvroSink' # type: str self.store_settings = store_settings @@ -2779,28 +3679,28 @@ class AvroSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Avro store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar store_settings: Avro store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -2830,6 +3730,28 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: Avro store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AvroSource' # type: str self.store_settings = store_settings @@ -2844,11 +3766,11 @@ class FormatWriteSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. 
+ :vartype type: str """ _validation = { @@ -2870,6 +3792,11 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(FormatWriteSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'FormatWriteSettings' # type: str @@ -2880,22 +3807,22 @@ class AvroWriteSettings(FormatWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param record_name: Top level record name in write result, which is required in AVRO spec. - :type record_name: str - :param record_namespace: Record namespace in the write result. - :type record_namespace: str - :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar record_name: Top level record name in write result, which is required in AVRO spec. + :vartype record_name: str + :ivar record_namespace: Record namespace in the write result. + :vartype record_namespace: str + :ivar max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). 
- :type max_rows_per_file: any - :param file_name_prefix: Specifies the file name pattern + :vartype max_rows_per_file: any + :ivar file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). - :type file_name_prefix: any + :vartype file_name_prefix: any """ _validation = { @@ -2921,6 +3848,22 @@ def __init__( file_name_prefix: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword record_name: Top level record name in write result, which is required in AVRO spec. + :paramtype record_name: str + :keyword record_namespace: Record namespace in the write result. + :paramtype record_namespace: str + :keyword max_rows_per_file: Limit the written file's row count to be smaller than or equal to + the specified count. Type: integer (or Expression with resultType integer). + :paramtype max_rows_per_file: any + :keyword file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :paramtype file_name_prefix: any + """ super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'AvroWriteSettings' # type: str self.record_name = record_name @@ -2937,8 +3880,8 @@ class CustomSetupBase(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. The type of custom setup.Constant filled by server. - :type type: str + :ivar type: Required. The type of custom setup.Constant filled by server. 
+ :vartype type: str """ _validation = { @@ -2957,6 +3900,8 @@ def __init__( self, **kwargs ): + """ + """ super(CustomSetupBase, self).__init__(**kwargs) self.type = None # type: Optional[str] @@ -2966,10 +3911,10 @@ class AzPowerShellSetup(CustomSetupBase): All required parameters must be populated in order to send to Azure. - :param type: Required. The type of custom setup.Constant filled by server. - :type type: str - :param version: Required. The required version of Azure PowerShell to install. - :type version: str + :ivar type: Required. The type of custom setup.Constant filled by server. + :vartype type: str + :ivar version: Required. The required version of Azure PowerShell to install. + :vartype version: str """ _validation = { @@ -2988,6 +3933,10 @@ def __init__( version: str, **kwargs ): + """ + :keyword version: Required. The required version of Azure PowerShell to install. + :paramtype version: str + """ super(AzPowerShellSetup, self).__init__(**kwargs) self.type = 'AzPowerShellSetup' # type: str self.version = version @@ -2998,38 +3947,38 @@ class AzureBatchLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. 
- :type annotations: list[any] - :param account_name: Required. The Azure Batch account name. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar account_name: Required. The Azure Batch account name. Type: string (or Expression with resultType string). - :type account_name: any - :param access_key: The Azure Batch account access key. - :type access_key: ~azure.mgmt.datafactory.models.SecretBase - :param batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType + :vartype account_name: any + :ivar access_key: The Azure Batch account access key. + :vartype access_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType string). - :type batch_uri: any - :param pool_name: Required. The Azure Batch pool name. Type: string (or Expression with + :vartype batch_uri: any + :ivar pool_name: Required. The Azure Batch pool name. Type: string (or Expression with resultType string). - :type pool_name: any - :param linked_service_name: Required. The Azure Storage linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype pool_name: any + :ivar linked_service_name: Required. The Azure Storage linked service reference. 
+ :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype encrypted_credential: any + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -3073,6 +4022,38 @@ def __init__( credential: Optional["CredentialReference"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword account_name: Required. The Azure Batch account name. Type: string (or Expression with + resultType string). + :paramtype account_name: any + :keyword access_key: The Azure Batch account access key. + :paramtype access_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType + string). + :paramtype batch_uri: any + :keyword pool_name: Required. The Azure Batch pool name. Type: string (or Expression with + resultType string). 
+ :paramtype pool_name: any + :keyword linked_service_name: Required. The Azure Storage linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureBatch' # type: str self.account_name = account_name @@ -3089,47 +4070,47 @@ class AzureBlobDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param folder_path: The path of the Azure Blob storage. Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar folder_path: The path of the Azure Blob storage. Type: string (or Expression with resultType string). - :type folder_path: any - :param table_root_location: The root of blob path. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar table_root_location: The root of blob path. Type: string (or Expression with resultType string). - :type table_root_location: any - :param file_name: The name of the Azure Blob. 
Type: string (or Expression with resultType + :vartype table_root_location: any + :ivar file_name: The name of the Azure Blob. Type: string (or Expression with resultType string). - :type file_name: any - :param modified_datetime_start: The start of Azure Blob's modified datetime. Type: string (or + :vartype file_name: any + :ivar modified_datetime_start: The start of Azure Blob's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of Azure Blob's modified datetime. Type: string (or + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of Azure Blob's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any - :param format: The format of the Azure Blob storage. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the blob storage. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + :vartype modified_datetime_end: any + :ivar format: The format of the Azure Blob storage. + :vartype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :ivar compression: The data compression method used for the blob storage. + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -3176,6 +4157,47 @@ def __init__( compression: Optional["DatasetCompression"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. 
Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword folder_path: The path of the Azure Blob storage. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword table_root_location: The root of blob path. Type: string (or Expression with + resultType string). + :paramtype table_root_location: any + :keyword file_name: The name of the Azure Blob. Type: string (or Expression with resultType + string). + :paramtype file_name: any + :keyword modified_datetime_start: The start of Azure Blob's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of Azure Blob's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + :keyword format: The format of the Azure Blob storage. + :paramtype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :keyword compression: The data compression method used for the blob storage. 
+ :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureBlob' # type: str self.folder_path = folder_path @@ -3192,38 +4214,38 @@ class AzureBlobFSDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. Type: string (or + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar folder_path: The path of the Azure Data Lake Storage Gen2 storage. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: The name of the Azure Data Lake Storage Gen2. Type: string (or Expression - with resultType string). - :type file_name: any - :param format: The format of the Azure Data Lake Storage Gen2 storage. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the blob storage. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + :vartype folder_path: any + :ivar file_name: The name of the Azure Data Lake Storage Gen2. Type: string (or Expression with + resultType string). + :vartype file_name: any + :ivar format: The format of the Azure Data Lake Storage Gen2 storage. 
+ :vartype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :ivar compression: The data compression method used for the blob storage. + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -3264,6 +4286,38 @@ def __init__( compression: Optional["DatasetCompression"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword folder_path: The path of the Azure Data Lake Storage Gen2 storage. Type: string (or + Expression with resultType string). + :paramtype folder_path: any + :keyword file_name: The name of the Azure Data Lake Storage Gen2. Type: string (or Expression + with resultType string). + :paramtype file_name: any + :keyword format: The format of the Azure Data Lake Storage Gen2 storage. 
+ :paramtype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :keyword compression: The data compression method used for the blob storage. + :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ super(AzureBlobFSDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureBlobFSFile' # type: str self.folder_path = folder_path @@ -3277,44 +4331,44 @@ class AzureBlobFSLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. 
+ :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). - :type url: any - :param account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or + :vartype url: any + :ivar account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). - :type account_key: any - :param service_principal_id: The ID of the application used to authenticate against the Azure + :vartype account_key: any + :ivar service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The Key of the application used to authenticate against the Azure + :vartype service_principal_id: any + :ivar service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Storage Gen2 account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. 
Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype azure_cloud_type: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype encrypted_credential: any + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -3357,6 +4411,44 @@ def __init__( credential: Optional["CredentialReference"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or + Expression with resultType string). + :paramtype url: any + :keyword account_key: Account key for the Azure Data Lake Storage Gen2 service. 
Type: string + (or Expression with resultType string). + :paramtype account_key: any + :keyword service_principal_id: The ID of the application used to authenticate against the Azure + Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The Key of the application used to authenticate against the + Azure Data Lake Storage Gen2 account. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. + Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is + the data factory regions’ cloud type. Type: string (or Expression with resultType string). + :paramtype azure_cloud_type: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureBlobFS' # type: str self.url = url @@ -3374,20 +4466,20 @@ class AzureBlobFSLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any - :param file_system: Specify the fileSystem of azure blobFS. Type: string (or Expression with + :vartype file_name: any + :ivar file_system: Specify the fileSystem of azure blobFS. Type: string (or Expression with resultType string). - :type file_system: any + :vartype file_system: any """ _validation = { @@ -3411,6 +4503,20 @@ def __init__( file_system: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + :keyword file_system: Specify the fileSystem of azure blobFS. Type: string (or Expression with + resultType string). 
+ :paramtype file_system: any + """ super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.type = 'AzureBlobFSLocation' # type: str self.file_system = file_system @@ -3421,44 +4527,44 @@ class AzureBlobFSReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: Azure blobFS wildcardFolderPath. 
Type: string (or Expression with + :vartype recursive: any + :ivar wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype wildcard_file_name: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype file_list_path: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. 
Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any + :vartype modified_datetime_end: any """ _validation = { @@ -3498,6 +4604,44 @@ def __init__( modified_datetime_end: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string (or Expression + with resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with + resultType string). + :paramtype wildcard_file_name: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. 
+ :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + """ super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobFSReadSettings' # type: str self.recursive = recursive @@ -3516,34 +4660,34 @@ class AzureBlobFSSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any - :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any + :ivar metadata: Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). 
- :type metadata: list[~azure.mgmt.datafactory.models.MetadataItem] + :vartype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ _validation = { @@ -3577,6 +4721,34 @@ def __init__( metadata: Optional[List["MetadataItem"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. + :paramtype copy_behavior: any + :keyword metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). 
+ :paramtype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] + """ super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobFSSink' # type: str self.copy_behavior = copy_behavior @@ -3588,32 +4760,32 @@ class AzureBlobFSSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType + :vartype disable_metrics_collection: any + :ivar treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). - :type treat_empty_as_null: any - :param skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or + :vartype treat_empty_as_null: any + :ivar skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). - :type skip_header_line_count: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype skip_header_line_count: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any + :vartype recursive: any """ _validation = { @@ -3645,6 +4817,32 @@ def __init__( recursive: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType + boolean). + :paramtype treat_empty_as_null: any + :keyword skip_header_line_count: Number of header lines to skip from each blob. Type: integer + (or Expression with resultType integer). + :paramtype skip_header_line_count: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + """ super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobFSSource' # type: str self.treat_empty_as_null = treat_empty_as_null @@ -3660,19 +4858,19 @@ class StoreWriteSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. 
- :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any """ _validation = { @@ -3700,6 +4898,19 @@ def __init__( copy_behavior: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. 
+ :paramtype copy_behavior: any + """ super(StoreWriteSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'StoreWriteSettings' # type: str @@ -3713,22 +4924,22 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any - :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any + :ivar block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). 
- :type block_size_in_mb: any + :vartype block_size_in_mb: any """ _validation = { @@ -3754,6 +4965,22 @@ def __init__( block_size_in_mb: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. + :paramtype copy_behavior: any + :keyword block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: + integer (or Expression with resultType integer). + :paramtype block_size_in_mb: any + """ super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureBlobFSWriteSettings' # type: str self.block_size_in_mb = block_size_in_mb @@ -3764,56 +4991,56 @@ class AzureBlobStorageLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: The connection string. It is mutually exclusive with sasUri, + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: The connection string. It is mutually exclusive with sasUri, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with + :vartype connection_string: any + :ivar account_key: The Azure key vault secret reference of accountKey in connection string. + :vartype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with connectionString, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. 
- :type sas_uri: any - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is + :vartype sas_uri: any + :ivar sas_token: The Azure key vault secret reference of sasToken in sas uri. + :vartype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is mutually exclusive with connectionString, sasUri property. - :type service_endpoint: str - :param service_principal_id: The ID of the service principal used to authenticate against Azure + :vartype service_endpoint: str + :ivar service_principal_id: The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The key of the service principal used to authenticate against + :vartype service_principal_id: any + :ivar service_principal_key: The key of the service principal used to authenticate against Azure SQL Data Warehouse. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. 
Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: any - :param account_kind: Specify the kind of your storage account. Allowed values are: Storage + :vartype azure_cloud_type: any + :ivar account_kind: Specify the kind of your storage account. Allowed values are: Storage (general purpose v1), StorageV2 (general purpose v2), BlobStorage, or BlockBlobStorage. Type: string (or Expression with resultType string). - :type account_kind: str - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype account_kind: str + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: str - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype encrypted_credential: str + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -3863,6 +5090,56 @@ def __init__( credential: Optional["CredentialReference"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. 
+ :paramtype annotations: list[any] + :keyword connection_string: The connection string. It is mutually exclusive with sasUri, + serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword account_key: The Azure key vault secret reference of accountKey in connection string. + :paramtype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with + connectionString, serviceEndpoint property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype sas_uri: any + :keyword sas_token: The Azure key vault secret reference of sasToken in sas uri. + :paramtype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is + mutually exclusive with connectionString, sasUri property. + :paramtype service_endpoint: str + :keyword service_principal_id: The ID of the service principal used to authenticate against + Azure SQL Data Warehouse. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The key of the service principal used to authenticate against + Azure SQL Data Warehouse. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. + Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is + the data factory regions’ cloud type. Type: string (or Expression with resultType string). + :paramtype azure_cloud_type: any + :keyword account_kind: Specify the kind of your storage account. 
Allowed values are: Storage + (general purpose v1), StorageV2 (general purpose v2), BlobStorage, or BlockBlobStorage. Type: + string (or Expression with resultType string). + :paramtype account_kind: str + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: str + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureBlobStorage' # type: str self.connection_string = connection_string @@ -3884,20 +5161,20 @@ class AzureBlobStorageLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. 
Type: string (or Expression with resultType string). - :type file_name: any - :param container: Specify the container of azure blob. Type: string (or Expression with + :vartype file_name: any + :ivar container: Specify the container of azure blob. Type: string (or Expression with resultType string). - :type container: any + :vartype container: any """ _validation = { @@ -3921,6 +5198,20 @@ def __init__( container: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + :keyword container: Specify the container of azure blob. Type: string (or Expression with + resultType string). + :paramtype container: any + """ super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.type = 'AzureBlobStorageLocation' # type: str self.container = container @@ -3931,47 +5222,47 @@ class AzureBlobStorageReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. 
+ :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string (or Expression with + :vartype recursive: any + :ivar wildcard_folder_path: Azure blob wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or Expression with + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: Azure blob wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with + :vartype wildcard_file_name: any + :ivar prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). - :type prefix: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype prefix: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). 
- :type file_list_path: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype file_list_path: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any + :vartype modified_datetime_end: any """ _validation = { @@ -4013,6 +5304,47 @@ def __init__( modified_datetime_end: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. 
Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: Azure blob wildcardFolderPath. Type: string (or Expression with + resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: Azure blob wildcardFileName. Type: string (or Expression with + resultType string). + :paramtype wildcard_file_name: any + :keyword prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with + resultType string). + :paramtype prefix: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. 
Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + """ super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobStorageReadSettings' # type: str self.recursive = recursive @@ -4032,22 +5364,22 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any - :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. 
+ :vartype copy_behavior: any + :ivar block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). - :type block_size_in_mb: any + :vartype block_size_in_mb: any """ _validation = { @@ -4073,6 +5405,22 @@ def __init__( block_size_in_mb: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. + :paramtype copy_behavior: any + :keyword block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: + integer (or Expression with resultType integer). + :paramtype block_size_in_mb: any + """ super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureBlobStorageWriteSettings' # type: str self.block_size_in_mb = block_size_in_mb @@ -4083,33 +5431,33 @@ class AzureDatabricksDeltaLakeDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. 
Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table: The name of delta table. Type: string (or Expression with resultType string). - :type table: any - :param database: The database name of delta table. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table: The name of delta table. Type: string (or Expression with resultType string). + :vartype table: any + :ivar database: The database name of delta table. Type: string (or Expression with resultType string). - :type database: any + :vartype database: any """ _validation = { @@ -4146,6 +5494,33 @@ def __init__( database: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table: The name of delta table. 
Type: string (or Expression with resultType string). + :paramtype table: any + :keyword database: The database name of delta table. Type: string (or Expression with + resultType string). + :paramtype database: any + """ super(AzureDatabricksDeltaLakeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureDatabricksDeltaLakeDataset' # type: str self.table = table @@ -4160,11 +5535,11 @@ class ExportSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The export setting type.Constant filled by server. - :type type: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. The export setting type.Constant filled by server. + :vartype type: str """ _validation = { @@ -4186,6 +5561,11 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(ExportSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'ExportSettings' # type: str @@ -4196,17 +5576,17 @@ class AzureDatabricksDeltaLakeExportCommand(ExportSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. The export setting type.Constant filled by server. - :type type: str - :param date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. + :vartype additional_properties: dict[str, any] + :ivar type: Required. The export setting type.Constant filled by server. + :vartype type: str + :ivar date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :type date_format: any - :param timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta - Lake Copy. Type: string (or Expression with resultType string). - :type timestamp_format: any + :vartype date_format: any + :ivar timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta Lake + Copy. Type: string (or Expression with resultType string). + :vartype timestamp_format: any """ _validation = { @@ -4228,6 +5608,17 @@ def __init__( timestamp_format: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. + Type: string (or Expression with resultType string). + :paramtype date_format: any + :keyword timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta + Lake Copy. Type: string (or Expression with resultType string). + :paramtype timestamp_format: any + """ super(AzureDatabricksDeltaLakeExportCommand, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'AzureDatabricksDeltaLakeExportCommand' # type: str self.date_format = date_format @@ -4242,11 +5633,11 @@ class ImportSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The import setting type.Constant filled by server. - :type type: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. The import setting type.Constant filled by server. + :vartype type: str """ _validation = { @@ -4268,6 +5659,11 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(ImportSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'ImportSettings' # type: str @@ -4278,17 +5674,17 @@ class AzureDatabricksDeltaLakeImportCommand(ImportSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The import setting type.Constant filled by server. - :type type: str - :param date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: + :vartype additional_properties: dict[str, any] + :ivar type: Required. The import setting type.Constant filled by server. + :vartype type: str + :ivar date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). 
- :type date_format: any - :param timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake + :vartype date_format: any + :ivar timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :type timestamp_format: any + :vartype timestamp_format: any """ _validation = { @@ -4310,6 +5706,17 @@ def __init__( timestamp_format: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. + Type: string (or Expression with resultType string). + :paramtype date_format: any + :keyword timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake + Copy. Type: string (or Expression with resultType string). + :paramtype timestamp_format: any + """ super(AzureDatabricksDeltaLakeImportCommand, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'AzureDatabricksDeltaLakeImportCommand' # type: str self.date_format = date_format @@ -4321,33 +5728,33 @@ class AzureDatabricksDeltaLakeLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). - :type domain: any - :param access_token: Access token for databricks REST API. Refer to + :vartype domain: any + :ivar access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or AzureKeyVaultSecretReference. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param cluster_id: The id of an existing interactive cluster that will be used for all runs of + :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar cluster_id: The id of an existing interactive cluster that will be used for all runs of this job. Type: string (or Expression with resultType string). - :type cluster_id: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype cluster_id: any + :ivar encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -4382,6 +5789,33 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :paramtype domain: any + :keyword access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword cluster_id: The id of an existing interactive cluster that will be used for all runs + of this job. Type: string (or Expression with resultType string). + :paramtype cluster_id: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(AzureDatabricksDeltaLakeLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureDatabricksDeltaLake' # type: str self.domain = domain @@ -4395,34 +5829,34 @@ class AzureDatabricksDeltaLakeSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + :vartype disable_metrics_collection: any + :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: any - :param import_settings: Azure Databricks Delta Lake import settings. - :type import_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeImportCommand + :vartype pre_copy_script: any + :ivar import_settings: Azure Databricks Delta Lake import settings. + :vartype import_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeImportCommand """ _validation = { @@ -4456,6 +5890,35 @@ def __init__( import_settings: Optional["AzureDatabricksDeltaLakeImportCommand"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :paramtype pre_copy_script: any + :keyword import_settings: Azure Databricks Delta Lake import settings. + :paramtype import_settings: + ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeImportCommand + """ super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDatabricksDeltaLakeSink' # type: str self.pre_copy_script = pre_copy_script @@ -4467,28 +5930,28 @@ class AzureDatabricksDeltaLakeSource(CopySource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with - resultType string). - :type query: any - :param export_settings: Azure Databricks Delta Lake export settings. 
- :type export_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeExportCommand + :vartype disable_metrics_collection: any + :ivar query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with resultType + string). + :vartype query: any + :ivar export_settings: Azure Databricks Delta Lake export settings. + :vartype export_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeExportCommand """ _validation = { @@ -4518,6 +5981,29 @@ def __init__( export_settings: Optional["AzureDatabricksDeltaLakeExportCommand"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with + resultType string). + :paramtype query: any + :keyword export_settings: Azure Databricks Delta Lake export settings. 
+ :paramtype export_settings: + ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeExportCommand + """ super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDatabricksDeltaLakeSource' # type: str self.query = query @@ -4529,86 +6015,86 @@ class AzureDatabricksLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). - :type domain: any - :param access_token: Access token for databricks REST API. Refer to + :vartype domain: any + :ivar access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param authentication: Required to specify MSI, if using Workspace resource id for databricks + :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar authentication: Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). - :type authentication: any - :param workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or + :vartype authentication: any + :ivar workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). - :type workspace_resource_id: any - :param existing_cluster_id: The id of an existing interactive cluster that will be used for all + :vartype workspace_resource_id: any + :ivar existing_cluster_id: The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). - :type existing_cluster_id: any - :param instance_pool_id: The id of an existing instance pool that will be used for all runs of + :vartype existing_cluster_id: any + :ivar instance_pool_id: The id of an existing instance pool that will be used for all runs of this activity. 
Type: string (or Expression with resultType string). - :type instance_pool_id: any - :param new_cluster_version: If not using an existing interactive cluster, this specifies the + :vartype instance_pool_id: any + :ivar new_cluster_version: If not using an existing interactive cluster, this specifies the Spark version of a new job cluster or instance pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string). - :type new_cluster_version: any - :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies + :vartype new_cluster_version: any + :ivar new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job cluster or instance pool. For new job clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: string (or Expression with resultType string). - :type new_cluster_num_of_worker: any - :param new_cluster_node_type: The node type of the new job cluster. This property is required - if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is + :vartype new_cluster_num_of_worker: any + :ivar new_cluster_node_type: The node type of the new job cluster. This property is required if + newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or Expression with resultType string). 
- :type new_cluster_node_type: any - :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value + :vartype new_cluster_node_type: any + :ivar new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value pairs. - :type new_cluster_spark_conf: dict[str, any] - :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment - variables key-value pairs. - :type new_cluster_spark_env_vars: dict[str, any] - :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored + :vartype new_cluster_spark_conf: dict[str, any] + :ivar new_cluster_spark_env_vars: A set of optional, user-specified Spark environment variables + key-value pairs. + :vartype new_cluster_spark_env_vars: dict[str, any] + :ivar new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored in instance pool configurations. - :type new_cluster_custom_tags: dict[str, any] - :param new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and + :vartype new_cluster_custom_tags: dict[str, any] + :ivar new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and event logs. Type: string (or Expression with resultType string). - :type new_cluster_log_destination: any - :param new_cluster_driver_node_type: The driver node type for the new job cluster. This - property is ignored in instance pool configurations. Type: string (or Expression with - resultType string). - :type new_cluster_driver_node_type: any - :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: + :vartype new_cluster_log_destination: any + :ivar new_cluster_driver_node_type: The driver node type for the new job cluster. This property + is ignored in instance pool configurations. Type: string (or Expression with resultType + string). 
+ :vartype new_cluster_driver_node_type: any + :ivar new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType array of strings). - :type new_cluster_init_scripts: any - :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This + :vartype new_cluster_init_scripts: any + :ivar new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean). - :type new_cluster_enable_elastic_disk: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype new_cluster_enable_elastic_disk: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param policy_id: The policy id for limiting the ability to configure clusters based on a user + :vartype encrypted_credential: any + :ivar policy_id: The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string). - :type policy_id: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype policy_id: any + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -4673,6 +6159,86 @@ def __init__( credential: Optional["CredentialReference"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :paramtype domain: any + :keyword access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression + with resultType string). + :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword authentication: Required to specify MSI, if using Workspace resource id for databricks + REST API. Type: string (or Expression with resultType string). + :paramtype authentication: any + :keyword workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or + Expression with resultType string). + :paramtype workspace_resource_id: any + :keyword existing_cluster_id: The id of an existing interactive cluster that will be used for + all runs of this activity. Type: string (or Expression with resultType string). + :paramtype existing_cluster_id: any + :keyword instance_pool_id: The id of an existing instance pool that will be used for all runs + of this activity. Type: string (or Expression with resultType string). + :paramtype instance_pool_id: any + :keyword new_cluster_version: If not using an existing interactive cluster, this specifies the + Spark version of a new job cluster or instance pool nodes created for each run of this + activity. 
Required if instancePoolId is specified. Type: string (or Expression with resultType + string). + :paramtype new_cluster_version: any + :keyword new_cluster_num_of_worker: If not using an existing interactive cluster, this + specifies the number of worker nodes to use for the new job cluster or instance pool. For new + job clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means + auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and + can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is + specified. Type: string (or Expression with resultType string). + :paramtype new_cluster_num_of_worker: any + :keyword new_cluster_node_type: The node type of the new job cluster. This property is required + if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is + specified, this property is ignored. Type: string (or Expression with resultType string). + :paramtype new_cluster_node_type: any + :keyword new_cluster_spark_conf: A set of optional, user-specified Spark configuration + key-value pairs. + :paramtype new_cluster_spark_conf: dict[str, any] + :keyword new_cluster_spark_env_vars: A set of optional, user-specified Spark environment + variables key-value pairs. + :paramtype new_cluster_spark_env_vars: dict[str, any] + :keyword new_cluster_custom_tags: Additional tags for cluster resources. This property is + ignored in instance pool configurations. + :paramtype new_cluster_custom_tags: dict[str, any] + :keyword new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and + event logs. Type: string (or Expression with resultType string). + :paramtype new_cluster_log_destination: any + :keyword new_cluster_driver_node_type: The driver node type for the new job cluster. This + property is ignored in instance pool configurations. Type: string (or Expression with + resultType string). 
+ :paramtype new_cluster_driver_node_type: any + :keyword new_cluster_init_scripts: User-defined initialization scripts for the new cluster. + Type: array of strings (or Expression with resultType array of strings). + :paramtype new_cluster_init_scripts: any + :keyword new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This + property is now ignored, and takes the default elastic disk behavior in Databricks (elastic + disks are always enabled). Type: boolean (or Expression with resultType boolean). + :paramtype new_cluster_enable_elastic_disk: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword policy_id: The policy id for limiting the ability to configure clusters based on a + user defined set of rules. Type: string (or Expression with resultType string). + :paramtype policy_id: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureDatabricks' # type: str self.domain = domain @@ -4704,23 +6270,23 @@ class ExecutionActivity(Activity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. 
- :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy """ _validation = { @@ -4755,6 +6321,23 @@ def __init__( policy: Optional["ActivityPolicy"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. 
+ :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + """ super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.type = 'Execution' # type: str self.linked_service_name = linked_service_name @@ -4766,29 +6349,29 @@ class AzureDataExplorerCommandActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param command: Required. A control command, according to the Azure Data Explorer command + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. 
+ :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar command: Required. A control command, according to the Azure Data Explorer command syntax. Type: string (or Expression with resultType string). - :type command: any - :param command_timeout: Control command timeout. Type: string (or Expression with resultType + :vartype command: any + :ivar command_timeout: Control command timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). - :type command_timeout: any + :vartype command_timeout: any """ _validation = { @@ -4824,6 +6407,29 @@ def __init__( command_timeout: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. 
+ :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword command: Required. A control command, according to the Azure Data Explorer command + syntax. Type: string (or Expression with resultType string). + :paramtype command: any + :keyword command_timeout: Control command timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). + :paramtype command_timeout: any + """ super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'AzureDataExplorerCommand' # type: str self.command = command @@ -4835,37 +6441,37 @@ class AzureDataExplorerLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL - will be in the format https://:code:``.:code:``.kusto.windows.net. - Type: string (or Expression with resultType string). 
- :type endpoint: any - :param service_principal_id: The ID of the service principal used to authenticate against Azure + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL will + be in the format https://:code:``.:code:``.kusto.windows.net. Type: + string (or Expression with resultType string). + :vartype endpoint: any + :ivar service_principal_id: The ID of the service principal used to authenticate against Azure Data Explorer. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The key of the service principal used to authenticate against + :vartype service_principal_id: any + :ivar service_principal_key: The key of the service principal used to authenticate against Kusto. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param database: Required. Database name for connection. Type: string (or Expression with + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar database: Required. Database name for connection. Type: string (or Expression with resultType string). - :type database: any - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). 
- :type tenant: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype database: any + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -4905,6 +6511,37 @@ def __init__( credential: Optional["CredentialReference"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL + will be in the format https://:code:``.:code:``.kusto.windows.net. + Type: string (or Expression with resultType string). + :paramtype endpoint: any + :keyword service_principal_id: The ID of the service principal used to authenticate against + Azure Data Explorer. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The key of the service principal used to authenticate against + Kusto. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword database: Required. 
Database name for connection. Type: string (or Expression with + resultType string). + :paramtype database: any + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureDataExplorer' # type: str self.endpoint = endpoint @@ -4920,38 +6557,38 @@ class AzureDataExplorerSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the + :vartype disable_metrics_collection: any + :ivar ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. - :type ingestion_mapping_name: any - :param ingestion_mapping_as_json: An explicit column mapping description provided in a json + :vartype ingestion_mapping_name: any + :ivar ingestion_mapping_as_json: An explicit column mapping description provided in a json format. Type: string. - :type ingestion_mapping_as_json: any - :param flush_immediately: If set to true, any aggregation will be skipped. Default is false. + :vartype ingestion_mapping_as_json: any + :ivar flush_immediately: If set to true, any aggregation will be skipped. Default is false. 
Type: boolean. - :type flush_immediately: any + :vartype flush_immediately: any """ _validation = { @@ -4987,6 +6624,38 @@ def __init__( flush_immediately: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the + target Kusto table. Type: string. + :paramtype ingestion_mapping_name: any + :keyword ingestion_mapping_as_json: An explicit column mapping description provided in a json + format. Type: string. + :paramtype ingestion_mapping_as_json: any + :keyword flush_immediately: If set to true, any aggregation will be skipped. Default is false. + Type: boolean. 
+ :paramtype flush_immediately: any + """ super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataExplorerSink' # type: str self.ingestion_mapping_name = ingestion_mapping_name @@ -4999,35 +6668,35 @@ class AzureDataExplorerSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type: + :vartype disable_metrics_collection: any + :ivar query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType string). - :type query: any - :param no_truncation: The name of the Boolean option that controls whether truncation is - applied to result-sets that go beyond a certain row-count limit. - :type no_truncation: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype query: any + :ivar no_truncation: The name of the Boolean option that controls whether truncation is applied + to result-sets that go beyond a certain row-count limit. + :vartype no_truncation: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -5062,6 +6731,35 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type: + string (or Expression with resultType string). + :paramtype query: any + :keyword no_truncation: The name of the Boolean option that controls whether truncation is + applied to result-sets that go beyond a certain row-count limit. + :paramtype no_truncation: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :paramtype additional_columns: any + """ super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataExplorerSource' # type: str self.query = query @@ -5075,31 +6773,31 @@ class AzureDataExplorerTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table: The table name of the Azure Data Explorer database. Type: string (or Expression + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table: The table name of the Azure Data Explorer database. Type: string (or Expression with resultType string). - :type table: any + :vartype table: any """ _validation = { @@ -5134,6 +6832,31 @@ def __init__( table: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table: The table name of the Azure Data Explorer database. Type: string (or Expression + with resultType string). + :paramtype table: any + """ super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureDataExplorerTable' # type: str self.table = table @@ -5144,44 +6867,44 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. 
- :type annotations: list[any] - :param account_name: Required. The Azure Data Lake Analytics account name. Type: string (or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar account_name: Required. The Azure Data Lake Analytics account name. Type: string (or Expression with resultType string). - :type account_name: any - :param service_principal_id: The ID of the application used to authenticate against the Azure + :vartype account_name: any + :ivar service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Analytics account. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The Key of the application used to authenticate against the Azure + :vartype service_principal_id: any + :ivar service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Analytics account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. The name or ID of the tenant to which the service principal belongs. + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: Required. The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). 
- :type tenant: any - :param subscription_id: Data Lake Analytics account subscription ID (if different from Data + :vartype tenant: any + :ivar subscription_id: Data Lake Analytics account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). - :type subscription_id: any - :param resource_group_name: Data Lake Analytics account resource group name (if different from + :vartype subscription_id: any + :ivar resource_group_name: Data Lake Analytics account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). - :type resource_group_name: any - :param data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string (or Expression with + :vartype resource_group_name: any + :ivar data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string (or Expression with resultType string). - :type data_lake_analytics_uri: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype data_lake_analytics_uri: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -5225,6 +6948,44 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword account_name: Required. The Azure Data Lake Analytics account name. Type: string (or + Expression with resultType string). + :paramtype account_name: any + :keyword service_principal_id: The ID of the application used to authenticate against the Azure + Data Lake Analytics account. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The Key of the application used to authenticate against the + Azure Data Lake Analytics account. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: Required. The name or ID of the tenant to which the service principal belongs. + Type: string (or Expression with resultType string). + :paramtype tenant: any + :keyword subscription_id: Data Lake Analytics account subscription ID (if different from Data + Factory account). Type: string (or Expression with resultType string). + :paramtype subscription_id: any + :keyword resource_group_name: Data Lake Analytics account resource group name (if different + from Data Factory account). Type: string (or Expression with resultType string). + :paramtype resource_group_name: any + :keyword data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string (or Expression + with resultType string). + :paramtype data_lake_analytics_uri: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureDataLakeAnalytics' # type: str self.account_name = account_name @@ -5242,39 +7003,39 @@ class AzureDataLakeStoreDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param folder_path: Path to the folder in the Azure Data Lake Store. Type: string (or - Expression with resultType string). - :type folder_path: any - :param file_name: The name of the file in the Azure Data Lake Store. Type: string (or - Expression with resultType string). - :type file_name: any - :param format: The format of the Data Lake Store. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the item(s) in the Azure Data Lake + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar folder_path: Path to the folder in the Azure Data Lake Store. Type: string (or Expression + with resultType string). + :vartype folder_path: any + :ivar file_name: The name of the file in the Azure Data Lake Store. Type: string (or Expression + with resultType string). + :vartype file_name: any + :ivar format: The format of the Data Lake Store. + :vartype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :ivar compression: The data compression method used for the item(s) in the Azure Data Lake Store. 
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -5315,6 +7076,39 @@ def __init__( compression: Optional["DatasetCompression"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword folder_path: Path to the folder in the Azure Data Lake Store. Type: string (or + Expression with resultType string). + :paramtype folder_path: any + :keyword file_name: The name of the file in the Azure Data Lake Store. Type: string (or + Expression with resultType string). + :paramtype file_name: any + :keyword format: The format of the Data Lake Store. 
+ :paramtype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :keyword compression: The data compression method used for the item(s) in the Azure Data Lake + Store. + :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ super(AzureDataLakeStoreDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureDataLakeStoreFile' # type: str self.folder_path = folder_path @@ -5328,50 +7122,50 @@ class AzureDataLakeStoreLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. 
+ :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or Expression with resultType string). - :type data_lake_store_uri: any - :param service_principal_id: The ID of the application used to authenticate against the Azure + :vartype data_lake_store_uri: any + :ivar service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Store account. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The Key of the application used to authenticate against the Azure + :vartype service_principal_id: any + :ivar service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Store account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: any - :param account_name: Data Lake Store account name. 
Type: string (or Expression with resultType + :vartype azure_cloud_type: any + :ivar account_name: Data Lake Store account name. Type: string (or Expression with resultType string). - :type account_name: any - :param subscription_id: Data Lake Store account subscription ID (if different from Data Factory + :vartype account_name: any + :ivar subscription_id: Data Lake Store account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). - :type subscription_id: any - :param resource_group_name: Data Lake Store account resource group name (if different from Data + :vartype subscription_id: any + :ivar resource_group_name: Data Lake Store account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). - :type resource_group_name: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype resource_group_name: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype encrypted_credential: any + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -5418,6 +7212,50 @@ def __init__( credential: Optional["CredentialReference"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or + Expression with resultType string). + :paramtype data_lake_store_uri: any + :keyword service_principal_id: The ID of the application used to authenticate against the Azure + Data Lake Store account. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The Key of the application used to authenticate against the + Azure Data Lake Store account. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. + Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is + the data factory regions’ cloud type. Type: string (or Expression with resultType string). + :paramtype azure_cloud_type: any + :keyword account_name: Data Lake Store account name. Type: string (or Expression with + resultType string). + :paramtype account_name: any + :keyword subscription_id: Data Lake Store account subscription ID (if different from Data + Factory account). Type: string (or Expression with resultType string). + :paramtype subscription_id: any + :keyword resource_group_name: Data Lake Store account resource group name (if different from + Data Factory account). 
Type: string (or Expression with resultType string). + :paramtype resource_group_name: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureDataLakeStore' # type: str self.data_lake_store_uri = data_lake_store_uri @@ -5437,17 +7275,17 @@ class AzureDataLakeStoreLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). 
- :type file_name: any + :vartype file_name: any """ _validation = { @@ -5469,6 +7307,17 @@ def __init__( file_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + """ super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.type = 'AzureDataLakeStoreLocation' # type: str @@ -5478,52 +7327,52 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or Expression with + :vartype recursive: any + :ivar wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype wildcard_file_name: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param list_after: Lists files after the value (exclusive) based on file/folder names’ + :vartype file_list_path: any + :ivar list_after: Lists files after the value (exclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). - :type list_after: any - :param list_before: Lists files before the value (inclusive) based on file/folder names’ + :vartype list_after: any + :ivar list_before: Lists files before the value (inclusive) based on file/folder names’ lexicographical order. 
Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). - :type list_before: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype list_before: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any + :vartype modified_datetime_end: any """ _validation = { @@ -5567,6 +7416,52 @@ def __init__( modified_datetime_end: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or Expression with + resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType + string). + :paramtype wildcard_file_name: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword list_after: Lists files after the value (exclusive) based on file/folder names’ + lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders + under the folderPath. Type: string (or Expression with resultType string). + :paramtype list_after: any + :keyword list_before: Lists files before the value (inclusive) based on file/folder names’ + lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders + under the folderPath. Type: string (or Expression with resultType string). + :paramtype list_before: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. 
+ :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + """ super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataLakeStoreReadSettings' # type: str self.recursive = recursive @@ -5587,33 +7482,33 @@ class AzureDataLakeStoreSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. 
- :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any - :param enable_adls_single_file_parallel: Single File Parallel. - :type enable_adls_single_file_parallel: any + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any + :ivar enable_adls_single_file_parallel: Single File Parallel. 
+ :vartype enable_adls_single_file_parallel: any """ _validation = { @@ -5647,6 +7542,33 @@ def __init__( enable_adls_single_file_parallel: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. + :paramtype copy_behavior: any + :keyword enable_adls_single_file_parallel: Single File Parallel. 
+ :paramtype enable_adls_single_file_parallel: any + """ super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataLakeStoreSink' # type: str self.copy_behavior = copy_behavior @@ -5658,26 +7580,26 @@ class AzureDataLakeStoreSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any + :vartype recursive: any """ _validation = { @@ -5705,6 +7627,26 @@ def __init__( recursive: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). 
+ :paramtype recursive: any + """ super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataLakeStoreSource' # type: str self.recursive = recursive @@ -5715,23 +7657,23 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any - :param expiry_date_time: Specifies the expiry time of the written files. The time is applied to + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. 
+ :vartype copy_behavior: any + :ivar expiry_date_time: Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: integer (or Expression with resultType integer). - :type expiry_date_time: any + :vartype expiry_date_time: any """ _validation = { @@ -5757,6 +7699,23 @@ def __init__( expiry_date_time: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. + :paramtype copy_behavior: any + :keyword expiry_date_time: Specifies the expiry time of the written files. The time is applied + to the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: + integer (or Expression with resultType integer). + :paramtype expiry_date_time: any + """ super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureDataLakeStoreWriteSettings' # type: str self.expiry_date_time = expiry_date_time @@ -5767,46 +7726,46 @@ class AzureFileStorageLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Host name of the server. Type: string (or Expression with resultType string). - :type host: any - :param user_id: User ID to logon the server. Type: string (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Host name of the server. Type: string (or Expression with resultType string). + :vartype host: any + :ivar user_id: User ID to logon the server. Type: string (or Expression with resultType string). - :type user_id: any - :param password: Password to logon the server. 
- :type password: ~azure.mgmt.datafactory.models.SecretBase - :param connection_string: The connection string. It is mutually exclusive with sasUri property. + :vartype user_id: any + :ivar password: Password to logon the server. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure File resource. It is mutually exclusive with + :vartype connection_string: any + :ivar account_key: The Azure key vault secret reference of accountKey in connection string. + :vartype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar sas_uri: SAS URI of the Azure File resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: any - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param file_share: The azure file share name. It is required when auth with - accountKey/sasToken. Type: string (or Expression with resultType string). - :type file_share: any - :param snapshot: The azure file share snapshot version. Type: string (or Expression with + :vartype sas_uri: any + :ivar sas_token: The Azure key vault secret reference of sasToken in sas uri. + :vartype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar file_share: The azure file share name. It is required when auth with accountKey/sasToken. + Type: string (or Expression with resultType string). + :vartype file_share: any + :ivar snapshot: The azure file share snapshot version. 
Type: string (or Expression with resultType string). - :type snapshot: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype snapshot: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -5852,6 +7811,46 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Host name of the server. Type: string (or Expression with resultType string). + :paramtype host: any + :keyword user_id: User ID to logon the server. Type: string (or Expression with resultType + string). + :paramtype user_id: any + :keyword password: Password to logon the server. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword connection_string: The connection string. It is mutually exclusive with sasUri + property. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword account_key: The Azure key vault secret reference of accountKey in connection string. 
+ :paramtype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword sas_uri: SAS URI of the Azure File resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype sas_uri: any + :keyword sas_token: The Azure key vault secret reference of sasToken in sas uri. + :paramtype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword file_share: The azure file share name. It is required when auth with + accountKey/sasToken. Type: string (or Expression with resultType string). + :paramtype file_share: any + :keyword snapshot: The azure file share snapshot version. Type: string (or Expression with + resultType string). + :paramtype snapshot: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AzureFileStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureFileStorage' # type: str self.host = host @@ -5871,17 +7870,17 @@ class AzureFileStorageLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. 
Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any + :vartype file_name: any """ _validation = { @@ -5903,6 +7902,17 @@ def __init__( file_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + """ super(AzureFileStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.type = 'AzureFileStorageLocation' # type: str @@ -5912,47 +7922,47 @@ class AzureFileStorageReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. 
+ :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: string (or Expression + :vartype recursive: any + :ivar wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression - with resultType string). - :type wildcard_file_name: any - :param prefix: The prefix filter for the Azure File name starting from root path. Type: string + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression with + resultType string). + :vartype wildcard_file_name: any + :ivar prefix: The prefix filter for the Azure File name starting from root path. Type: string (or Expression with resultType string). 
- :type prefix: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype prefix: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype file_list_path: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :type modified_datetime_end: any + :vartype modified_datetime_end: any """ _validation = { @@ -5994,6 +8004,47 @@ def __init__( modified_datetime_end: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: string (or + Expression with resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression + with resultType string). + :paramtype wildcard_file_name: any + :keyword prefix: The prefix filter for the Azure File name starting from root path. Type: + string (or Expression with resultType string). + :paramtype prefix: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). 
+ :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + """ super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureFileStorageReadSettings' # type: str self.recursive = recursive @@ -6013,19 +8064,19 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any """ _validation = { @@ -6049,6 +8100,19 @@ def __init__( copy_behavior: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. + :paramtype copy_behavior: any + """ super(AzureFileStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureFileStorageWriteSettings' # type: str @@ -6058,36 +8122,36 @@ class AzureFunctionActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. 
- :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param method: Required. Rest API method for target endpoint. Possible values include: "GET", + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar method: Required. Rest API method for target endpoint. Possible values include: "GET", "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "TRACE". - :type method: str or ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod - :param function_name: Required. Name of the Function that the Azure Function Activity will - call. Type: string (or Expression with resultType string). 
- :type function_name: any - :param headers: Represents the headers that will be sent to the request. For example, to set - the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": + :vartype method: str or ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod + :ivar function_name: Required. Name of the Function that the Azure Function Activity will call. + Type: string (or Expression with resultType string). + :vartype function_name: any + :ivar headers: Represents the headers that will be sent to the request. For example, to set the + language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :type headers: any - :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT + :vartype headers: any + :ivar body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). - :type body: any + :vartype body: any """ _validation = { @@ -6128,6 +8192,36 @@ def __init__( body: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. 
+ :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword method: Required. Rest API method for target endpoint. Possible values include: "GET", + "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "TRACE". + :paramtype method: str or ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod + :keyword function_name: Required. Name of the Function that the Azure Function Activity will + call. Type: string (or Expression with resultType string). + :paramtype function_name: any + :keyword headers: Represents the headers that will be sent to the request. For example, to set + the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": + "application/json" }. Type: string (or Expression with resultType string). + :paramtype headers: any + :keyword body: Represents the payload that will be sent to the endpoint. Required for POST/PUT + method, not allowed for GET method Type: string (or Expression with resultType string). + :paramtype body: any + """ super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'AzureFunctionActivity' # type: str self.method = method @@ -6141,35 +8235,35 @@ class AzureFunctionLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param function_app_url: Required. The endpoint of the Azure Function App. URL will be in the + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar function_app_url: Required. The endpoint of the Azure Function App. URL will be in the format https://:code:``.azurewebsites.net. - :type function_app_url: any - :param function_key: Function or Host key for Azure Function App. - :type function_key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype function_app_url: any + :ivar function_key: Function or Host key for Azure Function App. + :vartype function_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param credential: The credential reference containing authentication information. 
- :type credential: ~azure.mgmt.datafactory.models.CredentialReference - :param resource_id: Allowed token audiences for azure function. - :type resource_id: any - :param authentication: Type of authentication (Required to specify MSI) used to connect to + :vartype encrypted_credential: any + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference + :ivar resource_id: Allowed token audiences for azure function. + :vartype resource_id: any + :ivar authentication: Type of authentication (Required to specify MSI) used to connect to AzureFunction. Type: string (or Expression with resultType string). - :type authentication: any + :vartype authentication: any """ _validation = { @@ -6208,6 +8302,35 @@ def __init__( authentication: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword function_app_url: Required. The endpoint of the Azure Function App. URL will be in the + format https://:code:``.azurewebsites.net. + :paramtype function_app_url: any + :keyword function_key: Function or Host key for Azure Function App. + :paramtype function_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. 
Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + :keyword resource_id: Allowed token audiences for azure function. + :paramtype resource_id: any + :keyword authentication: Type of authentication (Required to specify MSI) used to connect to + AzureFunction. Type: string (or Expression with resultType string). + :paramtype authentication: any + """ super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureFunction' # type: str self.function_app_url = function_app_url @@ -6223,24 +8346,24 @@ class AzureKeyVaultLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param base_url: Required. The base URL of the Azure Key Vault. e.g. 
+ :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar base_url: Required. The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: string (or Expression with resultType string). - :type base_url: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype base_url: any + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -6271,6 +8394,24 @@ def __init__( credential: Optional["CredentialReference"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword base_url: Required. The base URL of the Azure Key Vault. e.g. 
+ https://myakv.vault.azure.net Type: string (or Expression with resultType string). + :paramtype base_url: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureKeyVault' # type: str self.base_url = base_url @@ -6285,8 +8426,8 @@ class SecretBase(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of the secret.Constant filled by server. - :type type: str + :ivar type: Required. Type of the secret.Constant filled by server. + :vartype type: str """ _validation = { @@ -6305,6 +8446,8 @@ def __init__( self, **kwargs ): + """ + """ super(SecretBase, self).__init__(**kwargs) self.type = None # type: Optional[str] @@ -6314,16 +8457,16 @@ class AzureKeyVaultSecretReference(SecretBase): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of the secret.Constant filled by server. - :type type: str - :param store: Required. The Azure Key Vault linked service reference. - :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or + :ivar type: Required. Type of the secret.Constant filled by server. + :vartype type: str + :ivar store: Required. The Azure Key Vault linked service reference. + :vartype store: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or Expression with resultType string). - :type secret_name: any - :param secret_version: The version of the secret in Azure Key Vault. 
The default value is the + :vartype secret_name: any + :ivar secret_version: The version of the secret in Azure Key Vault. The default value is the latest version of the secret. Type: string (or Expression with resultType string). - :type secret_version: any + :vartype secret_version: any """ _validation = { @@ -6347,6 +8490,16 @@ def __init__( secret_version: Optional[Any] = None, **kwargs ): + """ + :keyword store: Required. The Azure Key Vault linked service reference. + :paramtype store: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or + Expression with resultType string). + :paramtype secret_name: any + :keyword secret_version: The version of the secret in Azure Key Vault. The default value is the + latest version of the secret. Type: string (or Expression with resultType string). + :paramtype secret_version: any + """ super(AzureKeyVaultSecretReference, self).__init__(**kwargs) self.type = 'AzureKeyVaultSecret' # type: str self.store = store @@ -6359,28 +8512,28 @@ class AzureMariaDBLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. 
- :type annotations: list[any] - :param connection_string: An ODBC connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar pwd: The Azure key vault secret reference of password in connection string. + :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -6412,6 +8565,28 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword pwd: The Azure key vault secret reference of password in connection string. + :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureMariaDB' # type: str self.connection_string = connection_string @@ -6424,32 +8599,32 @@ class AzureMariaDBSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. 
+ :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. 
Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -6481,6 +8656,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). 
+ :paramtype query: any + """ super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureMariaDBSource' # type: str self.query = query @@ -6491,30 +8692,30 @@ class AzureMariaDBTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -6549,6 +8750,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureMariaDBTable' # type: str self.table_name = table_name @@ -6559,38 +8784,37 @@ class AzureMLBatchExecutionActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. 
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param global_parameters: Key,Value pairs to be passed to the Azure ML Batch Execution Service + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar global_parameters: Key,Value pairs to be passed to the Azure ML Batch Execution Service endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be passed in the GlobalParameters property of the Azure ML batch execution request. - :type global_parameters: dict[str, any] - :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web + :vartype global_parameters: dict[str, any] + :ivar web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed in the WebServiceOutputs property of the Azure ML batch execution request. 
- :type web_service_outputs: dict[str, ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] - :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web - Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations.. This - information will be passed in the WebServiceInputs property of the Azure ML batch execution - request. - :type web_service_inputs: dict[str, ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] + :vartype web_service_outputs: dict[str, ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] + :ivar web_service_inputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web Service + Inputs to AzureMLWebServiceFile objects specifying the input Blob locations. This information + will be passed in the WebServiceInputs property of the Azure ML batch execution request. + :vartype web_service_inputs: dict[str, ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] """ _validation = { @@ -6627,6 +8851,38 @@ def __init__( web_service_inputs: Optional[Dict[str, "AzureMLWebServiceFile"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. 
+ :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword global_parameters: Key,Value pairs to be passed to the Azure ML Batch Execution + Service endpoint. Keys must match the names of web service parameters defined in the published + Azure ML web service. Values will be passed in the GlobalParameters property of the Azure ML + batch execution request. + :paramtype global_parameters: dict[str, any] + :keyword web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web + Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This + information will be passed in the WebServiceOutputs property of the Azure ML batch execution + request. + :paramtype web_service_outputs: dict[str, ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] + :keyword web_service_inputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web + Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations. This + information will be passed in the WebServiceInputs property of the Azure ML batch execution + request. + :paramtype web_service_inputs: dict[str, ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] + """ super(AzureMLBatchExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'AzureMLBatchExecution' # type: str self.global_parameters = global_parameters @@ -6639,54 +8895,53 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. 
- :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param ml_pipeline_id: ID of the published Azure ML pipeline. Type: string (or Expression with + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar ml_pipeline_id: ID of the published Azure ML pipeline. Type: string (or Expression with resultType string). - :type ml_pipeline_id: any - :param ml_pipeline_endpoint_id: ID of the published Azure ML pipeline endpoint. Type: string - (or Expression with resultType string). - :type ml_pipeline_endpoint_id: any - :param version: Version of the published Azure ML pipeline endpoint. 
Type: string (or + :vartype ml_pipeline_id: any + :ivar ml_pipeline_endpoint_id: ID of the published Azure ML pipeline endpoint. Type: string (or Expression with resultType string). - :type version: any - :param experiment_name: Run history experiment name of the pipeline run. This information will + :vartype ml_pipeline_endpoint_id: any + :ivar version: Version of the published Azure ML pipeline endpoint. Type: string (or Expression + with resultType string). + :vartype version: any + :ivar experiment_name: Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). - :type experiment_name: any - :param ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML pipeline + :vartype experiment_name: any + :ivar ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML pipeline endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). - :type ml_pipeline_parameters: any - :param data_path_assignments: Dictionary used for changing data path assignments without + :vartype ml_pipeline_parameters: any + :ivar data_path_assignments: Dictionary used for changing data path assignments without retraining. Values will be passed in the dataPathAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). - :type data_path_assignments: any - :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be + :vartype data_path_assignments: any + :ivar ml_parent_run_id: The parent Azure ML Service pipeline run id. 
This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). - :type ml_parent_run_id: any - :param continue_on_step_failure: Whether to continue execution of other steps in the - PipelineRun if a step fails. This information will be passed in the continueOnStepFailure - property of the published pipeline execution request. Type: boolean (or Expression with - resultType boolean). - :type continue_on_step_failure: any + :vartype ml_parent_run_id: any + :ivar continue_on_step_failure: Whether to continue execution of other steps in the PipelineRun + if a step fails. This information will be passed in the continueOnStepFailure property of the + published pipeline execution request. Type: boolean (or Expression with resultType boolean). + :vartype continue_on_step_failure: any """ _validation = { @@ -6733,6 +8988,54 @@ def __init__( continue_on_step_failure: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword ml_pipeline_id: ID of the published Azure ML pipeline. Type: string (or Expression + with resultType string). 
+ :paramtype ml_pipeline_id: any + :keyword ml_pipeline_endpoint_id: ID of the published Azure ML pipeline endpoint. Type: string + (or Expression with resultType string). + :paramtype ml_pipeline_endpoint_id: any + :keyword version: Version of the published Azure ML pipeline endpoint. Type: string (or + Expression with resultType string). + :paramtype version: any + :keyword experiment_name: Run history experiment name of the pipeline run. This information + will be passed in the ExperimentName property of the published pipeline execution request. + Type: string (or Expression with resultType string). + :paramtype experiment_name: any + :keyword ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML + pipeline endpoint. Keys must match the names of pipeline parameters defined in the published + pipeline. Values will be passed in the ParameterAssignments property of the published pipeline + execution request. Type: object with key value pairs (or Expression with resultType object). + :paramtype ml_pipeline_parameters: any + :keyword data_path_assignments: Dictionary used for changing data path assignments without + retraining. Values will be passed in the dataPathAssignments property of the published pipeline + execution request. Type: object with key value pairs (or Expression with resultType object). + :paramtype data_path_assignments: any + :keyword ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will + be passed in the ParentRunId property of the published pipeline execution request. Type: string + (or Expression with resultType string). + :paramtype ml_parent_run_id: any + :keyword continue_on_step_failure: Whether to continue execution of other steps in the + PipelineRun if a step fails. This information will be passed in the continueOnStepFailure + property of the published pipeline execution request. Type: boolean (or Expression with + resultType boolean). 
+ :paramtype continue_on_step_failure: any + """ super(AzureMLExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'AzureMLExecutePipeline' # type: str self.ml_pipeline_id = ml_pipeline_id @@ -6750,44 +9053,44 @@ class AzureMLLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service + endpoint. Type: string (or Expression with resultType string). + :vartype ml_endpoint: any + :ivar api_key: Required. The API key for accessing the Azure ML model endpoint. + :vartype api_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). - :type ml_endpoint: any - :param api_key: Required. The API key for accessing the Azure ML model endpoint. - :type api_key: ~azure.mgmt.datafactory.models.SecretBase - :param update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web - Service endpoint. Type: string (or Expression with resultType string). - :type update_resource_endpoint: any - :param service_principal_id: The ID of the service principal used to authenticate against the + :vartype update_resource_endpoint: any + :ivar service_principal_id: The ID of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The key of the service principal used to authenticate against the + :vartype service_principal_id: any + :ivar service_principal_key: The key of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). 
- :type tenant: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param authentication: Type of authentication (Required to specify MSI) used to connect to + :vartype encrypted_credential: any + :ivar authentication: Type of authentication (Required to specify MSI) used to connect to AzureML. Type: string (or Expression with resultType string). - :type authentication: any + :vartype authentication: any """ _validation = { @@ -6831,6 +9134,44 @@ def __init__( authentication: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service + endpoint. Type: string (or Expression with resultType string). + :paramtype ml_endpoint: any + :keyword api_key: Required. The API key for accessing the Azure ML model endpoint. 
+ :paramtype api_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web + Service endpoint. Type: string (or Expression with resultType string). + :paramtype update_resource_endpoint: any + :keyword service_principal_id: The ID of the service principal used to authenticate against the + ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression + with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The key of the service principal used to authenticate against + the ARM-based updateResourceEndpoint of an Azure ML Studio web service. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword authentication: Type of authentication (Required to specify MSI) used to connect to + AzureML. Type: string (or Expression with resultType string). + :paramtype authentication: any + """ super(AzureMLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureML' # type: str self.ml_endpoint = ml_endpoint @@ -6848,42 +9189,42 @@ class AzureMLServiceLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param subscription_id: Required. Azure ML Service workspace subscription ID. Type: string (or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar subscription_id: Required. Azure ML Service workspace subscription ID. Type: string (or Expression with resultType string). - :type subscription_id: any - :param resource_group_name: Required. Azure ML Service workspace resource group name. Type: + :vartype subscription_id: any + :ivar resource_group_name: Required. Azure ML Service workspace resource group name. Type: string (or Expression with resultType string). 
- :type resource_group_name: any - :param ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or - Expression with resultType string). - :type ml_workspace_name: any - :param service_principal_id: The ID of the service principal used to authenticate against the + :vartype resource_group_name: any + :ivar ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or Expression + with resultType string). + :vartype ml_workspace_name: any + :ivar service_principal_id: The ID of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The key of the service principal used to authenticate against the + :vartype service_principal_id: any + :ivar service_principal_key: The key of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -6926,6 +9267,42 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword subscription_id: Required. Azure ML Service workspace subscription ID. Type: string + (or Expression with resultType string). + :paramtype subscription_id: any + :keyword resource_group_name: Required. Azure ML Service workspace resource group name. Type: + string (or Expression with resultType string). + :paramtype resource_group_name: any + :keyword ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or + Expression with resultType string). + :paramtype ml_workspace_name: any + :keyword service_principal_id: The ID of the service principal used to authenticate against the + endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType + string). + :paramtype service_principal_id: any + :keyword service_principal_key: The key of the service principal used to authenticate against + the endpoint of a published Azure ML Service pipeline. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). 
+ :paramtype tenant: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AzureMLServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureMLService' # type: str self.subscription_id = subscription_id @@ -6942,33 +9319,34 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param trained_model_name: Required. Name of the Trained Model module in the Web Service + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. 
+ :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar trained_model_name: Required. Name of the Trained Model module in the Web Service experiment to be updated. Type: string (or Expression with resultType string). - :type trained_model_name: any - :param trained_model_linked_service_name: Required. Name of Azure Storage linked service - holding the .ilearner file that will be uploaded by the update operation. - :type trained_model_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param trained_model_file_path: Required. The relative file path in trainedModelLinkedService - to represent the .ilearner file that will be uploaded by the update operation. Type: string - (or Expression with resultType string). - :type trained_model_file_path: any + :vartype trained_model_name: any + :ivar trained_model_linked_service_name: Required. Name of Azure Storage linked service holding + the .ilearner file that will be uploaded by the update operation. + :vartype trained_model_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar trained_model_file_path: Required. The relative file path in trainedModelLinkedService to + represent the .ilearner file that will be uploaded by the update operation. Type: string (or + Expression with resultType string). 
+ :vartype trained_model_file_path: any """ _validation = { @@ -7008,6 +9386,34 @@ def __init__( policy: Optional["ActivityPolicy"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword trained_model_name: Required. Name of the Trained Model module in the Web Service + experiment to be updated. Type: string (or Expression with resultType string). + :paramtype trained_model_name: any + :keyword trained_model_linked_service_name: Required. Name of Azure Storage linked service + holding the .ilearner file that will be uploaded by the update operation. + :paramtype trained_model_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword trained_model_file_path: Required. The relative file path in trainedModelLinkedService + to represent the .ilearner file that will be uploaded by the update operation. Type: string + (or Expression with resultType string). 
+ :paramtype trained_model_file_path: any + """ super(AzureMLUpdateResourceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'AzureMLUpdateResource' # type: str self.trained_model_name = trained_model_name @@ -7020,12 +9426,12 @@ class AzureMLWebServiceFile(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param file_path: Required. The relative file path, including container name, in the Azure Blob + :ivar file_path: Required. The relative file path, including container name, in the Azure Blob Storage specified by the LinkedService. Type: string (or Expression with resultType string). - :type file_path: any - :param linked_service_name: Required. Reference to an Azure Storage LinkedService, where Azure + :vartype file_path: any + :ivar linked_service_name: Required. Reference to an Azure Storage LinkedService, where Azure ML WebService Input/Output file located. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { @@ -7045,6 +9451,15 @@ def __init__( linked_service_name: "LinkedServiceReference", **kwargs ): + """ + :keyword file_path: Required. The relative file path, including container name, in the Azure + Blob Storage specified by the LinkedService. Type: string (or Expression with resultType + string). + :paramtype file_path: any + :keyword linked_service_name: Required. Reference to an Azure Storage LinkedService, where + Azure ML WebService Input/Output file located. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + """ super(AzureMLWebServiceFile, self).__init__(**kwargs) self.file_path = file_path self.linked_service_name = linked_service_name @@ -7055,28 +9470,28 @@ class AzureMySqlLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
- :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -7109,6 +9524,28 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. + :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. 
Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureMySql' # type: str self.connection_string = connection_string @@ -7121,32 +9558,32 @@ class AzureMySqlSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param pre_copy_script: A query to execute before starting the copy. Type: string (or - Expression with resultType string). - :type pre_copy_script: any + :vartype disable_metrics_collection: any + :ivar pre_copy_script: A query to execute before starting the copy. Type: string (or Expression + with resultType string). + :vartype pre_copy_script: any """ _validation = { @@ -7178,6 +9615,32 @@ def __init__( pre_copy_script: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). + :paramtype pre_copy_script: any + """ super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureMySqlSink' # type: str self.pre_copy_script = pre_copy_script @@ -7188,31 +9651,31 @@ class AzureMySqlSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. 
+ :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any + :vartype additional_columns: any + :ivar query: Database query. 
Type: string (or Expression with resultType string). + :vartype query: any """ _validation = { @@ -7244,6 +9707,31 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Type: string (or Expression with resultType string). 
+ :paramtype query: any + """ super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureMySqlSource' # type: str self.query = query @@ -7254,34 +9742,34 @@ class AzureMySqlTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The Azure MySQL database table name. Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The Azure MySQL database table name. Type: string (or Expression with resultType string). - :type table_name: any - :param table: The name of Azure MySQL database table. Type: string (or Expression with + :vartype table_name: any + :ivar table: The name of Azure MySQL database table. Type: string (or Expression with resultType string). - :type table: any + :vartype table: any """ _validation = { @@ -7318,6 +9806,34 @@ def __init__( table: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. 
+ :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The Azure MySQL database table name. Type: string (or Expression with + resultType string). + :paramtype table_name: any + :keyword table: The name of Azure MySQL database table. Type: string (or Expression with + resultType string). + :paramtype table: any + """ super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureMySqlTable' # type: str self.table_name = table_name @@ -7329,28 +9845,28 @@ class AzurePostgreSqlLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: An ODBC connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -7382,6 +9898,28 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. + :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzurePostgreSql' # type: str self.connection_string = connection_string @@ -7394,32 +9932,32 @@ class AzurePostgreSqlSink(CopySink): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param pre_copy_script: A query to execute before starting the copy. Type: string (or - Expression with resultType string). - :type pre_copy_script: any + :vartype disable_metrics_collection: any + :ivar pre_copy_script: A query to execute before starting the copy. Type: string (or Expression + with resultType string). + :vartype pre_copy_script: any """ _validation = { @@ -7451,6 +9989,32 @@ def __init__( pre_copy_script: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. 
Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). + :paramtype pre_copy_script: any + """ super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzurePostgreSqlSink' # type: str self.pre_copy_script = pre_copy_script @@ -7461,32 +10025,32 @@ class AzurePostgreSqlSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -7518,6 +10082,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. 
Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzurePostgreSqlSource' # type: str self.query = query @@ -7528,37 +10118,37 @@ class AzurePostgreSqlTableDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name of the Azure PostgreSQL database which includes both schema + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name of the Azure PostgreSQL database which includes both schema and table. Type: string (or Expression with resultType string). - :type table_name: any - :param table: The table name of the Azure PostgreSQL database. Type: string (or Expression with + :vartype table_name: any + :ivar table: The table name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: + :vartype table: any + :ivar schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -7597,6 +10187,37 @@ def __init__( schema_type_properties_schema: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. 
Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name of the Azure PostgreSQL database which includes both schema + and table. Type: string (or Expression with resultType string). + :paramtype table_name: any + :keyword table: The table name of the Azure PostgreSQL database. Type: string (or Expression + with resultType string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: + string (or Expression with resultType string). + :paramtype schema_type_properties_schema: any + """ super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzurePostgreSqlTable' # type: str self.table_name = table_name @@ -7609,29 +10230,29 @@ class AzureQueueSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. 
Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any + :vartype disable_metrics_collection: any """ _validation = { @@ -7661,6 +10282,29 @@ def __init__( disable_metrics_collection: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype disable_metrics_collection: any + """ super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureQueueSink' # type: str @@ -7670,31 +10314,31 @@ class AzureSearchIndexDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param index_name: Required. The name of the Azure Search Index. Type: string (or Expression + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar index_name: Required. The name of the Azure Search Index. Type: string (or Expression with resultType string). - :type index_name: any + :vartype index_name: any """ _validation = { @@ -7730,6 +10374,31 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. 
Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword index_name: Required. The name of the Azure Search Index. Type: string (or Expression + with resultType string). + :paramtype index_name: any + """ super(AzureSearchIndexDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureSearchIndex' # type: str self.index_name = index_name @@ -7740,32 +10409,33 @@ class AzureSearchIndexSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: Specify the write behavior when upserting documents into Azure Search + :vartype disable_metrics_collection: any + :ivar write_behavior: Specify the write behavior when upserting documents into Azure Search Index. Possible values include: "Merge", "Upload". 
- :type write_behavior: str or ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType + :vartype write_behavior: str or + ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType """ _validation = { @@ -7797,6 +10467,33 @@ def __init__( write_behavior: Optional[Union[str, "AzureSearchIndexWriteBehaviorType"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: Specify the write behavior when upserting documents into Azure Search + Index. Possible values include: "Merge", "Upload". 
+ :paramtype write_behavior: str or + ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType + """ super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureSearchIndexSink' # type: str self.write_behavior = write_behavior @@ -7807,28 +10504,28 @@ class AzureSearchLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param url: Required. URL for Azure Search service. Type: string (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. 
+ :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar url: Required. URL for Azure Search service. Type: string (or Expression with resultType string). - :type url: any - :param key: Admin Key for Azure Search service. - :type key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype url: any + :ivar key: Admin Key for Azure Search service. + :vartype key: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -7861,6 +10558,28 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword url: Required. URL for Azure Search service. Type: string (or Expression with + resultType string). + :paramtype url: any + :keyword key: Admin Key for Azure Search service. 
+ :paramtype key: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureSearch' # type: str self.url = url @@ -7873,45 +10592,45 @@ class AzureSqlDatabaseLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. 
+ :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar service_principal_id: The ID of the service principal used to authenticate against Azure SQL Database. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The key of the service principal used to authenticate against + :vartype service_principal_id: any + :ivar service_principal_key: The key of the service principal used to authenticate against Azure SQL Database. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). 
+ :vartype tenant: any + :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype azure_cloud_type: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param always_encrypted_settings: Sql always encrypted properties. - :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype encrypted_credential: any + :ivar always_encrypted_settings: Sql always encrypted properties. + :vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -7956,6 +10675,46 @@ def __init__( credential: Optional["CredentialReference"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. + :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword service_principal_id: The ID of the service principal used to authenticate against + Azure SQL Database. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The key of the service principal used to authenticate against + Azure SQL Database. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. + Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is + the data factory regions’ cloud type. Type: string (or Expression with resultType string). + :paramtype azure_cloud_type: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword always_encrypted_settings: Sql always encrypted properties. + :paramtype always_encrypted_settings: + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :keyword credential: The credential reference containing authentication information. 
+ :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureSqlDatabase' # type: str self.connection_string = connection_string @@ -7974,43 +10733,43 @@ class AzureSqlDWLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. 
+ :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar service_principal_id: The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The key of the service principal used to authenticate against + :vartype service_principal_id: any + :ivar service_principal_key: The key of the service principal used to authenticate against Azure SQL Data Warehouse. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. 
Type: string (or Expression with resultType string). - :type azure_cloud_type: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype azure_cloud_type: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype encrypted_credential: any + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -8053,6 +10812,43 @@ def __init__( credential: Optional["CredentialReference"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. 
+ :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword service_principal_id: The ID of the service principal used to authenticate against + Azure SQL Data Warehouse. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The key of the service principal used to authenticate against + Azure SQL Data Warehouse. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. + Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is + the data factory regions’ cloud type. Type: string (or Expression with resultType string). + :paramtype azure_cloud_type: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureSqlDW' # type: str self.connection_string = connection_string @@ -8070,37 +10866,37 @@ class AzureSqlDWTableDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. Type: + :vartype table_name: any + :ivar schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression with + :vartype schema_type_properties_schema: any + :ivar table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :type table: any + :vartype table: any """ _validation = { @@ -8139,6 +10935,37 @@ def __init__( table: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. 
+ :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. Type: + string (or Expression with resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression + with resultType string). + :paramtype table: any + """ super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureSqlDWTable' # type: str self.table_name = table_name @@ -8151,45 +10978,45 @@ class AzureSqlMILinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar service_principal_id: The ID of the service principal used to authenticate against Azure SQL Managed Instance. Type: string (or Expression with resultType string). 
- :type service_principal_id: any - :param service_principal_key: The key of the service principal used to authenticate against + :vartype service_principal_id: any + :ivar service_principal_key: The key of the service principal used to authenticate against Azure SQL Managed Instance. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype azure_cloud_type: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param always_encrypted_settings: Sql always encrypted properties. - :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype encrypted_credential: any + :ivar always_encrypted_settings: Sql always encrypted properties. 
+ :vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -8234,6 +11061,46 @@ def __init__( credential: Optional["CredentialReference"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. + :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword service_principal_id: The ID of the service principal used to authenticate against + Azure SQL Managed Instance. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The key of the service principal used to authenticate against + Azure SQL Managed Instance. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). 
+ :paramtype tenant: any + :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. + Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is + the data factory regions’ cloud type. Type: string (or Expression with resultType string). + :paramtype azure_cloud_type: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword always_encrypted_settings: Sql always encrypted properties. + :paramtype always_encrypted_settings: + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureSqlMI' # type: str self.connection_string = connection_string @@ -8252,37 +11119,37 @@ class AzureSqlMITableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. 
+ :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. 
- :type table_name: any - :param schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. Type: + :vartype table_name: any + :ivar schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The table name of the Azure SQL Managed Instance dataset. Type: string (or + :vartype schema_type_properties_schema: any + :ivar table: The table name of the Azure SQL Managed Instance dataset. Type: string (or Expression with resultType string). - :type table: any + :vartype table: any """ _validation = { @@ -8321,6 +11188,37 @@ def __init__( table: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. + Type: string (or Expression with resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The table name of the Azure SQL Managed Instance dataset. Type: string (or + Expression with resultType string). + :paramtype table: any + """ super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureSqlMITable' # type: str self.table_name = table_name @@ -8333,55 +11231,55 @@ class AzureSqlSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + :vartype disable_metrics_collection: any + :ivar sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: any - :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with - resultType string). - :type sql_writer_table_type: any - :param pre_copy_script: SQL pre-copy script. 
Type: string (or Expression with resultType + :vartype sql_writer_stored_procedure_name: any + :ivar sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType + string). + :vartype sql_writer_table_type: any + :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: any - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, + :vartype pre_copy_script: any + :ivar stored_procedure_parameters: SQL stored procedure parameters. + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + :ivar stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :type stored_procedure_table_type_parameter_name: any - :param table_option: The option to handle sink table, such as autoCreate. For now only + :vartype stored_procedure_table_type_parameter_name: any + :ivar table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: any - :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + :vartype table_option: any + :ivar sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :type sql_writer_use_table_lock: any - :param write_behavior: Write behavior when copying data into Azure SQL. Type: + :vartype sql_writer_use_table_lock: any + :ivar write_behavior: Write behavior when copying data into Azure SQL. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). 
- :type write_behavior: any - :param upsert_settings: SQL upsert settings. - :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings + :vartype write_behavior: any + :ivar upsert_settings: SQL upsert settings. + :vartype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ _validation = { @@ -8429,6 +11327,55 @@ def __init__( upsert_settings: Optional["SqlUpsertSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :paramtype sql_writer_stored_procedure_name: any + :keyword sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). 
+ :paramtype sql_writer_table_type: any + :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :paramtype pre_copy_script: any + :keyword stored_procedure_parameters: SQL stored procedure parameters. + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :keyword stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :paramtype stored_procedure_table_type_parameter_name: any + :keyword table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :paramtype table_option: any + :keyword sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean + (or Expression with resultType boolean). + :paramtype sql_writer_use_table_lock: any + :keyword write_behavior: Write behavior when copying data into Azure SQL. Type: + SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :paramtype write_behavior: any + :keyword upsert_settings: SQL upsert settings. + :paramtype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings + """ super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureSqlSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name @@ -8447,46 +11394,46 @@ class AzureSqlSource(TabularSource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: any - :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database - source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression - with resultType string). - :type sql_reader_stored_procedure_name: any - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + :vartype additional_columns: any + :ivar sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :vartype sql_reader_query: any + :ivar sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. + This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with + resultType string). + :vartype sql_reader_stored_procedure_name: any + :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: any - :param partition_option: The partition mechanism that will be used for Sql read in parallel. + :ivar produce_additional_types: Which additional types to produce. + :vartype produce_additional_types: any + :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. 
Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. + :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -8528,6 +11475,47 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :paramtype additional_columns: any + :keyword sql_reader_query: SQL reader query. Type: string (or Expression with resultType + string). + :paramtype sql_reader_query: any + :keyword sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :paramtype sql_reader_stored_procedure_name: any + :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :keyword produce_additional_types: Which additional types to produce. + :paramtype produce_additional_types: any + :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. + :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + """ super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureSqlSource' # type: str self.sql_reader_query = sql_reader_query @@ -8543,37 +11531,37 @@ class AzureSqlTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param schema_type_properties_schema: The schema name of the Azure SQL database. Type: string + :vartype table_name: any + :ivar schema_type_properties_schema: The schema name of the Azure SQL database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The table name of the Azure SQL database. Type: string (or Expression with + :vartype schema_type_properties_schema: any + :ivar table: The table name of the Azure SQL database. Type: string (or Expression with resultType string). - :type table: any + :vartype table: any """ _validation = { @@ -8612,6 +11600,37 @@ def __init__( table: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword schema_type_properties_schema: The schema name of the Azure SQL database. Type: string + (or Expression with resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The table name of the Azure SQL database. Type: string (or Expression with + resultType string). + :paramtype table: any + """ super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureSqlTable' # type: str self.table_name = table_name @@ -8624,33 +11643,33 @@ class AzureStorageLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: The connection string. It is mutually exclusive with sasUri property. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + :vartype connection_string: any + :ivar account_key: The Azure key vault secret reference of accountKey in connection string. + :vartype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: any - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. 
- :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype sas_uri: any + :ivar sas_token: The Azure key vault secret reference of sasToken in sas uri. + :vartype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: str + :vartype encrypted_credential: str """ _validation = { @@ -8686,6 +11705,33 @@ def __init__( encrypted_credential: Optional[str] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: The connection string. It is mutually exclusive with sasUri + property. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword account_key: The Azure key vault secret reference of accountKey in connection string. + :paramtype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. 
+ :paramtype sas_uri: any + :keyword sas_token: The Azure key vault secret reference of sasToken in sas uri. + :paramtype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: str + """ super(AzureStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureStorage' # type: str self.connection_string = connection_string @@ -8700,31 +11746,31 @@ class AzureTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: Required. The table name of the Azure Table storage. Type: string (or + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: Required. The table name of the Azure Table storage. Type: string (or Expression with resultType string). - :type table_name: any + :vartype table_name: any """ _validation = { @@ -8760,6 +11806,31 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. 
Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: Required. The table name of the Azure Table storage. Type: string (or + Expression with resultType string). + :paramtype table_name: any + """ super(AzureTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureTable' # type: str self.table_name = table_name @@ -8770,41 +11841,41 @@ class AzureTableSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. 
Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param azure_table_default_partition_key_value: Azure Table default partition key value. 
Type: + :vartype disable_metrics_collection: any + :ivar azure_table_default_partition_key_value: Azure Table default partition key value. Type: string (or Expression with resultType string). - :type azure_table_default_partition_key_value: any - :param azure_table_partition_key_name: Azure Table partition key name. Type: string (or + :vartype azure_table_default_partition_key_value: any + :ivar azure_table_partition_key_name: Azure Table partition key name. Type: string (or Expression with resultType string). - :type azure_table_partition_key_name: any - :param azure_table_row_key_name: Azure Table row key name. Type: string (or Expression with + :vartype azure_table_partition_key_name: any + :ivar azure_table_row_key_name: Azure Table row key name. Type: string (or Expression with resultType string). - :type azure_table_row_key_name: any - :param azure_table_insert_type: Azure Table insert type. Type: string (or Expression with + :vartype azure_table_row_key_name: any + :ivar azure_table_insert_type: Azure Table insert type. Type: string (or Expression with resultType string). - :type azure_table_insert_type: any + :vartype azure_table_insert_type: any """ _validation = { @@ -8842,6 +11913,41 @@ def __init__( azure_table_insert_type: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword azure_table_default_partition_key_value: Azure Table default partition key value. + Type: string (or Expression with resultType string). + :paramtype azure_table_default_partition_key_value: any + :keyword azure_table_partition_key_name: Azure Table partition key name. Type: string (or + Expression with resultType string). + :paramtype azure_table_partition_key_name: any + :keyword azure_table_row_key_name: Azure Table row key name. Type: string (or Expression with + resultType string). + :paramtype azure_table_row_key_name: any + :keyword azure_table_insert_type: Azure Table insert type. Type: string (or Expression with + resultType string). + :paramtype azure_table_insert_type: any + """ super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureTableSink' # type: str self.azure_table_default_partition_key_value = azure_table_default_partition_key_value @@ -8855,35 +11961,35 @@ class AzureTableSource(TabularSource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param azure_table_source_query: Azure Table source query. Type: string (or Expression with + :vartype additional_columns: any + :ivar azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). - :type azure_table_source_query: any - :param azure_table_source_ignore_table_not_found: Azure Table source ignore table not found. + :vartype azure_table_source_query: any + :ivar azure_table_source_ignore_table_not_found: Azure Table source ignore table not found. Type: boolean (or Expression with resultType boolean). - :type azure_table_source_ignore_table_not_found: any + :vartype azure_table_source_ignore_table_not_found: any """ _validation = { @@ -8917,6 +12023,35 @@ def __init__( azure_table_source_ignore_table_not_found: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. 
Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword azure_table_source_query: Azure Table source query. Type: string (or Expression with + resultType string). + :paramtype azure_table_source_query: any + :keyword azure_table_source_ignore_table_not_found: Azure Table source ignore table not found. + Type: boolean (or Expression with resultType boolean). + :paramtype azure_table_source_ignore_table_not_found: any + """ super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureTableSource' # type: str self.azure_table_source_query = azure_table_source_query @@ -8928,33 +12063,33 @@ class AzureTableStorageLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: The connection string. It is mutually exclusive with sasUri property. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + :vartype connection_string: any + :ivar account_key: The Azure key vault secret reference of accountKey in connection string. + :vartype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. 
- :type sas_uri: any - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype sas_uri: any + :ivar sas_token: The Azure key vault secret reference of sasToken in sas uri. + :vartype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: str + :vartype encrypted_credential: str """ _validation = { @@ -8990,6 +12125,33 @@ def __init__( encrypted_credential: Optional[str] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: The connection string. It is mutually exclusive with sasUri + property. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword account_key: The Azure key vault secret reference of accountKey in connection string. + :paramtype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword sas_uri: SAS URI of the Azure Storage resource. 
It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype sas_uri: any + :keyword sas_token: The Azure key vault secret reference of sasToken in sas uri. + :paramtype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: str + """ super(AzureTableStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureTableStorage' # type: str self.connection_string = connection_string @@ -9004,32 +12166,32 @@ class BinaryDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the Binary storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param compression: The data compression method used for the binary dataset. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar location: The location of the Binary storage. + :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation + :ivar compression: The data compression method used for the binary dataset. 
+ :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -9066,6 +12228,32 @@ def __init__( compression: Optional["DatasetCompression"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword location: The location of the Binary storage. + :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation + :keyword compression: The data compression method used for the binary dataset. 
+ :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'Binary' # type: str self.location = location @@ -9080,11 +12268,11 @@ class FormatReadSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str """ _validation = { @@ -9106,6 +12294,11 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(FormatReadSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'FormatReadSettings' # type: str @@ -9116,13 +12309,13 @@ class BinaryReadSettings(FormatReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. 
- :type type: str - :param compression_properties: Compression settings. - :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar compression_properties: Compression settings. + :vartype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ _validation = { @@ -9142,6 +12335,13 @@ def __init__( compression_properties: Optional["CompressionReadSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword compression_properties: Compression settings. + :paramtype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + """ super(BinaryReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'BinaryReadSettings' # type: str self.compression_properties = compression_properties @@ -9152,31 +12352,31 @@ class BinarySink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Binary store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :vartype disable_metrics_collection: any + :ivar store_settings: Binary store settings. 
+ :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings """ _validation = { @@ -9208,6 +12408,31 @@ def __init__( store_settings: Optional["StoreWriteSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: Binary store settings. 
+ :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + """ super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BinarySink' # type: str self.store_settings = store_settings @@ -9218,27 +12443,27 @@ class BinarySource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Binary store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: Binary format settings. - :type format_settings: ~azure.mgmt.datafactory.models.BinaryReadSettings + :vartype disable_metrics_collection: any + :ivar store_settings: Binary store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar format_settings: Binary format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.BinaryReadSettings """ _validation = { @@ -9268,6 +12493,27 @@ def __init__( format_settings: Optional["BinaryReadSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype disable_metrics_collection: any + :keyword store_settings: Binary store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword format_settings: Binary format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.BinaryReadSettings + """ super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BinarySource' # type: str self.store_settings = store_settings @@ -9284,18 +12530,18 @@ class Trigger(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Trigger type.Constant filled by server. + :vartype type: str + :ivar description: Trigger description. + :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[any] + :ivar annotations: List of tags that can be used for describing the trigger. 
+ :vartype annotations: list[any] """ _validation = { @@ -9323,6 +12569,15 @@ def __init__( annotations: Optional[List[Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Trigger description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the trigger. + :paramtype annotations: list[any] + """ super(Trigger, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'Trigger' # type: str @@ -9341,20 +12596,20 @@ class MultiplePipelineTrigger(Trigger): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Trigger type.Constant filled by server. + :vartype type: str + :ivar description: Trigger description. + :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[any] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :ivar annotations: List of tags that can be used for describing the trigger. 
+ :vartype annotations: list[any] + :ivar pipelines: Pipelines that need to be started. + :vartype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] """ _validation = { @@ -9384,6 +12639,17 @@ def __init__( pipelines: Optional[List["TriggerPipelineReference"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Trigger description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the trigger. + :paramtype annotations: list[any] + :keyword pipelines: Pipelines that need to be started. + :paramtype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + """ super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.type = 'MultiplePipelineTrigger' # type: str self.pipelines = pipelines @@ -9396,35 +12662,35 @@ class BlobEventsTrigger(MultiplePipelineTrigger): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Trigger type.Constant filled by server. + :vartype type: str + :ivar description: Trigger description. + :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". 
:vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[any] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param blob_path_begins_with: The blob path must begin with the pattern provided for trigger to + :ivar annotations: List of tags that can be used for describing the trigger. + :vartype annotations: list[any] + :ivar pipelines: Pipelines that need to be started. + :vartype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :ivar blob_path_begins_with: The blob path must begin with the pattern provided for trigger to fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the records container. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. - :type blob_path_begins_with: str - :param blob_path_ends_with: The blob path must end with the pattern provided for trigger to + :vartype blob_path_begins_with: str + :ivar blob_path_ends_with: The blob path must end with the pattern provided for trigger to fire. For example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. - :type blob_path_ends_with: str - :param ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. - :type ignore_empty_blobs: bool - :param events: Required. The type of events that cause this trigger to fire. - :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] - :param scope: Required. The ARM resource ID of the Storage Account. - :type scope: str + :vartype blob_path_ends_with: str + :ivar ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. 
+ :vartype ignore_empty_blobs: bool + :ivar events: Required. The type of events that cause this trigger to fire. + :vartype events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] + :ivar scope: Required. The ARM resource ID of the Storage Account. + :vartype scope: str """ _validation = { @@ -9462,6 +12728,32 @@ def __init__( ignore_empty_blobs: Optional[bool] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Trigger description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the trigger. + :paramtype annotations: list[any] + :keyword pipelines: Pipelines that need to be started. + :paramtype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :keyword blob_path_begins_with: The blob path must begin with the pattern provided for trigger + to fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the + december folder under the records container. At least one of these must be provided: + blobPathBeginsWith, blobPathEndsWith. + :paramtype blob_path_begins_with: str + :keyword blob_path_ends_with: The blob path must end with the pattern provided for trigger to + fire. For example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a + december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + :paramtype blob_path_ends_with: str + :keyword ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. + :paramtype ignore_empty_blobs: bool + :keyword events: Required. The type of events that cause this trigger to fire. + :paramtype events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] + :keyword scope: Required. The ARM resource ID of the Storage Account. 
+ :paramtype scope: str + """ super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) self.type = 'BlobEventsTrigger' # type: str self.blob_path_begins_with = blob_path_begins_with @@ -9476,43 +12768,43 @@ class BlobSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression + :vartype disable_metrics_collection: any + :ivar blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). - :type blob_writer_overwrite_files: any - :param blob_writer_date_time_format: Blob writer date time format. Type: string (or Expression + :vartype blob_writer_overwrite_files: any + :ivar blob_writer_date_time_format: Blob writer date time format. Type: string (or Expression with resultType string). - :type blob_writer_date_time_format: any - :param blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with + :vartype blob_writer_date_time_format: any + :ivar blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with resultType boolean). - :type blob_writer_add_header: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any - :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + :vartype blob_writer_add_header: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any + :ivar metadata: Specify the custom metadata to be added to sink data. 
Type: array of objects (or Expression with resultType array of objects). - :type metadata: list[~azure.mgmt.datafactory.models.MetadataItem] + :vartype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ _validation = { @@ -9552,6 +12844,43 @@ def __init__( metadata: Optional[List["MetadataItem"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression + with resultType boolean). + :paramtype blob_writer_overwrite_files: any + :keyword blob_writer_date_time_format: Blob writer date time format. Type: string (or + Expression with resultType string). 
+ :paramtype blob_writer_date_time_format: any + :keyword blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with + resultType boolean). + :paramtype blob_writer_add_header: any + :keyword copy_behavior: The type of copy behavior for copy sink. + :paramtype copy_behavior: any + :keyword metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :paramtype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] + """ super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BlobSink' # type: str self.blob_writer_overwrite_files = blob_writer_overwrite_files @@ -9566,32 +12895,32 @@ class BlobSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType + :vartype disable_metrics_collection: any + :ivar treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). - :type treat_empty_as_null: any - :param skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or + :vartype treat_empty_as_null: any + :ivar skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). - :type skip_header_line_count: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype skip_header_line_count: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any + :vartype recursive: any """ _validation = { @@ -9623,6 +12952,32 @@ def __init__( recursive: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType + boolean). + :paramtype treat_empty_as_null: any + :keyword skip_header_line_count: Number of header lines to skip from each blob. Type: integer + (or Expression with resultType integer). + :paramtype skip_header_line_count: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + """ super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BlobSource' # type: str self.treat_empty_as_null = treat_empty_as_null @@ -9637,27 +12992,27 @@ class BlobTrigger(MultiplePipelineTrigger): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Trigger type.Constant filled by server. + :vartype type: str + :ivar description: Trigger description. + :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[any] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param folder_path: Required. The path of the container/folder that will trigger the pipeline. - :type folder_path: str - :param max_concurrency: Required. The max number of parallel files to handle when it is + :ivar annotations: List of tags that can be used for describing the trigger. + :vartype annotations: list[any] + :ivar pipelines: Pipelines that need to be started. + :vartype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :ivar folder_path: Required. The path of the container/folder that will trigger the pipeline. + :vartype folder_path: str + :ivar max_concurrency: Required. The max number of parallel files to handle when it is triggered. - :type max_concurrency: int - :param linked_service: Required. The Azure Storage linked service reference. 
- :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :vartype max_concurrency: int + :ivar linked_service: Required. The Azure Storage linked service reference. + :vartype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { @@ -9692,6 +13047,25 @@ def __init__( pipelines: Optional[List["TriggerPipelineReference"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Trigger description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the trigger. + :paramtype annotations: list[any] + :keyword pipelines: Pipelines that need to be started. + :paramtype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :keyword folder_path: Required. The path of the container/folder that will trigger the + pipeline. + :paramtype folder_path: str + :keyword max_concurrency: Required. The max number of parallel files to handle when it is + triggered. + :paramtype max_concurrency: int + :keyword linked_service: Required. The Azure Storage linked service reference. + :paramtype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + """ super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) self.type = 'BlobTrigger' # type: str self.folder_path = folder_path @@ -9704,37 +13078,36 @@ class CassandraLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. 
Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. Host name for connection. Type: string (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. Host name for connection. Type: string (or Expression with resultType string). - :type host: any - :param authentication_type: AuthenticationType to be used for connection. Type: string (or + :vartype host: any + :ivar authentication_type: AuthenticationType to be used for connection. Type: string (or Expression with resultType string). - :type authentication_type: any - :param port: The port for the connection. Type: integer (or Expression with resultType - integer). - :type port: any - :param username: Username for authentication. Type: string (or Expression with resultType + :vartype authentication_type: any + :ivar port: The port for the connection. Type: integer (or Expression with resultType integer). 
+ :vartype port: any + :ivar username: Username for authentication. Type: string (or Expression with resultType string). - :type username: any - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype username: any + :ivar password: Password for authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -9773,6 +13146,37 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. Host name for connection. Type: string (or Expression with resultType + string). + :paramtype host: any + :keyword authentication_type: AuthenticationType to be used for connection. Type: string (or + Expression with resultType string). + :paramtype authentication_type: any + :keyword port: The port for the connection. Type: integer (or Expression with resultType + integer). 
+ :paramtype port: any + :keyword username: Username for authentication. Type: string (or Expression with resultType + string). + :paramtype username: any + :keyword password: Password for authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Cassandra' # type: str self.host = host @@ -9788,39 +13192,39 @@ class CassandraSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language + :vartype additional_columns: any + :ivar query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). - :type query: any - :param consistency_level: The consistency level specifies how many Cassandra servers must + :vartype query: any + :ivar consistency_level: The consistency level specifies how many Cassandra servers must respond to a read request before returning data to the client application. 
Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. Possible values include: "ALL", "EACH_QUORUM", "QUORUM", "LOCAL_QUORUM", "ONE", "TWO", "THREE", "LOCAL_ONE", "SERIAL", "LOCAL_SERIAL". - :type consistency_level: str or + :vartype consistency_level: str or ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels """ @@ -9855,6 +13259,40 @@ def __init__( consistency_level: Optional[Union[str, "CassandraSourceReadConsistencyLevels"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. 
Should be a SQL-92 query expression or Cassandra Query Language + (CQL) command. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword consistency_level: The consistency level specifies how many Cassandra servers must + respond to a read request before returning data to the client application. Cassandra checks the + specified number of Cassandra servers for data to satisfy the read request. Must be one of + cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. + Possible values include: "ALL", "EACH_QUORUM", "QUORUM", "LOCAL_QUORUM", "ONE", "TWO", "THREE", + "LOCAL_ONE", "SERIAL", "LOCAL_SERIAL". + :paramtype consistency_level: str or + ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels + """ super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'CassandraSource' # type: str self.query = query @@ -9866,34 +13304,34 @@ class CassandraTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. 
+ :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name of the Cassandra database. Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name of the Cassandra database. Type: string (or Expression with resultType string). 
- :type table_name: any - :param keyspace: The keyspace of the Cassandra database. Type: string (or Expression with + :vartype table_name: any + :ivar keyspace: The keyspace of the Cassandra database. Type: string (or Expression with resultType string). - :type keyspace: any + :vartype keyspace: any """ _validation = { @@ -9930,6 +13368,34 @@ def __init__( keyspace: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name of the Cassandra database. Type: string (or Expression with + resultType string). + :paramtype table_name: any + :keyword keyspace: The keyspace of the Cassandra database. Type: string (or Expression with + resultType string). 
+ :paramtype keyspace: any + """ super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'CassandraTable' # type: str self.table_name = table_name @@ -9943,26 +13409,26 @@ class ChainingTrigger(Trigger): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Trigger type.Constant filled by server. + :vartype type: str + :ivar description: Trigger description. + :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[any] - :param pipeline: Required. Pipeline for which runs are created when all upstream pipelines + :ivar annotations: List of tags that can be used for describing the trigger. + :vartype annotations: list[any] + :ivar pipeline: Required. Pipeline for which runs are created when all upstream pipelines complete successfully. - :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference - :param depends_on: Required. Upstream Pipelines. 
- :type depends_on: list[~azure.mgmt.datafactory.models.PipelineReference] - :param run_dimension: Required. Run Dimension property that needs to be emitted by upstream + :vartype pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference + :ivar depends_on: Required. Upstream Pipelines. + :vartype depends_on: list[~azure.mgmt.datafactory.models.PipelineReference] + :ivar run_dimension: Required. Run Dimension property that needs to be emitted by upstream pipelines. - :type run_dimension: str + :vartype run_dimension: str """ _validation = { @@ -9995,6 +13461,23 @@ def __init__( annotations: Optional[List[Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Trigger description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the trigger. + :paramtype annotations: list[any] + :keyword pipeline: Required. Pipeline for which runs are created when all upstream pipelines + complete successfully. + :paramtype pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference + :keyword depends_on: Required. Upstream Pipelines. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.PipelineReference] + :keyword run_dimension: Required. Run Dimension property that needs to be emitted by upstream + pipelines. + :paramtype run_dimension: str + """ super(ChainingTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.type = 'ChainingTrigger' # type: str self.pipeline = pipeline @@ -10007,14 +13490,14 @@ class CloudError(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param code: Required. Error code. - :type code: str - :param message: Required. Error message. 
- :type message: str - :param target: Property name/path in request associated with error. - :type target: str - :param details: Array with additional error details. - :type details: list[~azure.mgmt.datafactory.models.CloudError] + :ivar code: Required. Error code. + :vartype code: str + :ivar message: Required. Error message. + :vartype message: str + :ivar target: Property name/path in request associated with error. + :vartype target: str + :ivar details: Array with additional error details. + :vartype details: list[~azure.mgmt.datafactory.models.CloudError] """ _validation = { @@ -10038,6 +13521,16 @@ def __init__( details: Optional[List["CloudError"]] = None, **kwargs ): + """ + :keyword code: Required. Error code. + :paramtype code: str + :keyword message: Required. Error message. + :paramtype message: str + :keyword target: Property name/path in request associated with error. + :paramtype target: str + :keyword details: Array with additional error details. + :paramtype details: list[~azure.mgmt.datafactory.models.CloudError] + """ super(CloudError, self).__init__(**kwargs) self.code = code self.message = message @@ -10050,14 +13543,14 @@ class CmdkeySetup(CustomSetupBase): All required parameters must be populated in order to send to Azure. - :param type: Required. The type of custom setup.Constant filled by server. - :type type: str - :param target_name: Required. The server name of data source access. - :type target_name: any - :param user_name: Required. The user name of data source access. - :type user_name: any - :param password: Required. The password of data source access. - :type password: ~azure.mgmt.datafactory.models.SecretBase + :ivar type: Required. The type of custom setup.Constant filled by server. + :vartype type: str + :ivar target_name: Required. The server name of data source access. + :vartype target_name: any + :ivar user_name: Required. The user name of data source access. + :vartype user_name: any + :ivar password: Required. 
The password of data source access. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -10082,6 +13575,14 @@ def __init__( password: "SecretBase", **kwargs ): + """ + :keyword target_name: Required. The server name of data source access. + :paramtype target_name: any + :keyword user_name: Required. The user name of data source access. + :paramtype user_name: any + :keyword password: Required. The password of data source access. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + """ super(CmdkeySetup, self).__init__(**kwargs) self.type = 'CmdkeySetup' # type: str self.target_name = target_name @@ -10092,9 +13593,9 @@ def __init__( class CMKIdentityDefinition(msrest.serialization.Model): """Managed Identity used for CMK. - :param user_assigned_identity: The resource id of the user assigned identity to authenticate to + :ivar user_assigned_identity: The resource id of the user assigned identity to authenticate to customer's key vault. - :type user_assigned_identity: str + :vartype user_assigned_identity: str """ _attribute_map = { @@ -10107,6 +13608,11 @@ def __init__( user_assigned_identity: Optional[str] = None, **kwargs ): + """ + :keyword user_assigned_identity: The resource id of the user assigned identity to authenticate + to customer's key vault. + :paramtype user_assigned_identity: str + """ super(CMKIdentityDefinition, self).__init__(**kwargs) self.user_assigned_identity = user_assigned_identity @@ -10116,31 +13622,31 @@ class CommonDataServiceForAppsEntityDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. 
- :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param entity_name: The logical name of the entity. 
Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar entity_name: The logical name of the entity. Type: string (or Expression with resultType string). - :type entity_name: any + :vartype entity_name: any """ _validation = { @@ -10175,6 +13681,31 @@ def __init__( entity_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword entity_name: The logical name of the entity. Type: string (or Expression with + resultType string). 
+ :paramtype entity_name: any + """ super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'CommonDataServiceForAppsEntity' # type: str self.entity_name = entity_name @@ -10185,68 +13716,68 @@ class CommonDataServiceForAppsLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param deployment_type: Required. The deployment type of the Common Data Service for Apps + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar deployment_type: Required. The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType string). - :type deployment_type: any - :param host_name: The host name of the on-premises Common Data Service for Apps server. The + :vartype deployment_type: any + :ivar host_name: The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :type host_name: any - :param port: The port of on-premises Common Data Service for Apps server. The property is + :vartype host_name: any + :ivar port: The port of on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: any - :param service_uri: The URL to the Microsoft Common Data Service for Apps server. The property + :vartype port: any + :ivar service_uri: The URL to the Microsoft Common Data Service for Apps server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: any - :param organization_name: The organization name of the Common Data Service for Apps instance. + :vartype service_uri: any + :ivar organization_name: The organization name of the Common Data Service for Apps instance. The property is required for on-prem and required for online when there are more than one Common Data Service for Apps instances associated with the user. 
Type: string (or Expression with resultType string). - :type organization_name: any - :param authentication_type: Required. The authentication type to connect to Common Data Service + :vartype organization_name: any + :ivar authentication_type: Required. The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). - :type authentication_type: any - :param username: User name to access the Common Data Service for Apps instance. Type: string - (or Expression with resultType string). - :type username: any - :param password: Password to access the Common Data Service for Apps instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_credential_type: The service principal credential type to use in + :vartype authentication_type: any + :ivar username: User name to access the Common Data Service for Apps instance. Type: string (or + Expression with resultType string). + :vartype username: any + :ivar password: Password to access the Common Data Service for Apps instance. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_principal_id: The client ID of the application in Azure Active Directory used for + Server-To-Server authentication. Type: string (or Expression with resultType string). + :vartype service_principal_id: any + :ivar service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). 
- :type service_principal_credential_type: any - :param service_principal_credential: The credential of the service principal object in Azure + :vartype service_principal_credential_type: any + :ivar service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -10298,6 +13829,68 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword deployment_type: Required. The deployment type of the Common Data Service for Apps + instance. 
'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common + Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType + string). + :paramtype deployment_type: any + :keyword host_name: The host name of the on-premises Common Data Service for Apps server. The + property is required for on-prem and not allowed for online. Type: string (or Expression with + resultType string). + :paramtype host_name: any + :keyword port: The port of on-premises Common Data Service for Apps server. The property is + required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression + with resultType integer), minimum: 0. + :paramtype port: any + :keyword service_uri: The URL to the Microsoft Common Data Service for Apps server. The + property is required for on-line and not allowed for on-prem. Type: string (or Expression with + resultType string). + :paramtype service_uri: any + :keyword organization_name: The organization name of the Common Data Service for Apps instance. + The property is required for on-prem and required for online when there are more than one + Common Data Service for Apps instances associated with the user. Type: string (or Expression + with resultType string). + :paramtype organization_name: any + :keyword authentication_type: Required. The authentication type to connect to Common Data + Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd + scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: + string (or Expression with resultType string). + :paramtype authentication_type: any + :keyword username: User name to access the Common Data Service for Apps instance. Type: string + (or Expression with resultType string). + :paramtype username: any + :keyword password: Password to access the Common Data Service for Apps instance. 
+ :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). + :paramtype service_principal_credential_type: any + :keyword service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :paramtype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'CommonDataServiceForApps' # type: str self.deployment_type = deployment_type @@ -10319,39 +13912,39 @@ class CommonDataServiceForAppsSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: Required. The write behavior for the operation. Possible values include: + :vartype disable_metrics_collection: any + :ivar write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". - :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior - :param ignore_null_values: The flag indicating whether to ignore null values from input dataset + :vartype write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior + :ivar ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: any - :param alternate_key_name: The logical name of the alternate key which will be used when + :vartype ignore_null_values: any + :ivar alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). - :type alternate_key_name: any + :vartype alternate_key_name: any """ _validation = { @@ -10388,6 +13981,39 @@ def __init__( alternate_key_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: Required. The write behavior for the operation. Possible values + include: "Upsert". + :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior + :keyword ignore_null_values: The flag indicating whether to ignore null values from input + dataset (except key fields) during write operation. Default is false. Type: boolean (or + Expression with resultType boolean). + :paramtype ignore_null_values: any + :keyword alternate_key_name: The logical name of the alternate key which will be used when + upserting records. Type: string (or Expression with resultType string). + :paramtype alternate_key_name: any + """ super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CommonDataServiceForAppsSink' # type: str self.write_behavior = write_behavior @@ -10400,29 +14026,29 @@ class CommonDataServiceForAppsSource(CopySource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data + :vartype disable_metrics_collection: any + :ivar query: FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). 
Type: string (or Expression with resultType string). - :type query: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -10452,6 +14078,29 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: FetchXML is a proprietary query language that is used in Microsoft Common Data + Service for Apps (online & on-premises). Type: string (or Expression with resultType string). + :paramtype query: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :paramtype additional_columns: any + """ super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CommonDataServiceForAppsSource' # type: str self.query = query @@ -10463,12 +14112,12 @@ class ComponentSetup(CustomSetupBase): All required parameters must be populated in order to send to Azure. - :param type: Required. The type of custom setup.Constant filled by server. - :type type: str - :param component_name: Required. The name of the 3rd party component. - :type component_name: str - :param license_key: The license key to activate the component. - :type license_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar type: Required. The type of custom setup.Constant filled by server. + :vartype type: str + :ivar component_name: Required. The name of the 3rd party component. + :vartype component_name: str + :ivar license_key: The license key to activate the component. + :vartype license_key: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -10489,6 +14138,12 @@ def __init__( license_key: Optional["SecretBase"] = None, **kwargs ): + """ + :keyword component_name: Required. The name of the 3rd party component. + :paramtype component_name: str + :keyword license_key: The license key to activate the component. + :paramtype license_key: ~azure.mgmt.datafactory.models.SecretBase + """ super(ComponentSetup, self).__init__(**kwargs) self.type = 'ComponentSetup' # type: str self.component_name = component_name @@ -10503,11 +14158,11 @@ class CompressionReadSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The Compression setting type.Constant filled by server. - :type type: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. The Compression setting type.Constant filled by server. + :vartype type: str """ _validation = { @@ -10529,6 +14184,11 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(CompressionReadSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'CompressionReadSettings' # type: str @@ -10539,43 +14199,43 @@ class ConcurLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_properties: Properties used to connect to Concur. 
It is mutually exclusive + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_properties: Properties used to connect to Concur. It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: any - :param client_id: Required. Application client_id supplied by Concur App Management. - :type client_id: any - :param username: Required. The user name that you use to access Concur Service. - :type username: any - :param password: The password corresponding to the user name that you provided in the username + :vartype connection_properties: any + :ivar client_id: Required. Application client_id supplied by Concur App Management. + :vartype client_id: any + :ivar username: Required. The user name that you use to access Concur Service. + :vartype username: any + :ivar password: The password corresponding to the user name that you provided in the username field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
- :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -10619,6 +14279,43 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_properties: Properties used to connect to Concur. 
It is mutually exclusive + with any other properties in the linked service. Type: object. + :paramtype connection_properties: any + :keyword client_id: Required. Application client_id supplied by Concur App Management. + :paramtype client_id: any + :keyword username: Required. The user name that you use to access Concur Service. + :paramtype username: any + :keyword password: The password corresponding to the user name that you provided in the + username field. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Concur' # type: str self.connection_properties = connection_properties @@ -10636,30 +14333,30 @@ class ConcurObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -10694,6 +14391,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'ConcurObject' # type: str self.table_name = table_name @@ -10704,32 +14425,32 @@ class ConcurSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -10761,6 +14482,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ConcurSource' # type: str self.query = query @@ -10795,6 +14542,8 @@ def __init__( self, **kwargs ): + """ + """ super(ConnectionStateProperties, self).__init__(**kwargs) self.actions_required = None self.description = None @@ -10806,66 +14555,66 @@ class CopyActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. 
Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param inputs: List of inputs for the activity. - :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] - :param outputs: List of outputs for the activity. - :type outputs: list[~azure.mgmt.datafactory.models.DatasetReference] - :param source: Required. Copy activity source. - :type source: ~azure.mgmt.datafactory.models.CopySource - :param sink: Required. Copy activity sink. - :type sink: ~azure.mgmt.datafactory.models.CopySink - :param translator: Copy activity translator. If not specified, tabular translator is used. - :type translator: any - :param enable_staging: Specifies whether to copy data via an interim staging. Default value is + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. 
+ :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar inputs: List of inputs for the activity. + :vartype inputs: list[~azure.mgmt.datafactory.models.DatasetReference] + :ivar outputs: List of outputs for the activity. + :vartype outputs: list[~azure.mgmt.datafactory.models.DatasetReference] + :ivar source: Required. Copy activity source. + :vartype source: ~azure.mgmt.datafactory.models.CopySource + :ivar sink: Required. Copy activity sink. + :vartype sink: ~azure.mgmt.datafactory.models.CopySink + :ivar translator: Copy activity translator. If not specified, tabular translator is used. + :vartype translator: any + :ivar enable_staging: Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_staging: any - :param staging_settings: Specifies interim staging settings when EnableStaging is true. - :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings - :param parallel_copies: Maximum number of concurrent sessions opened on the source or sink to + :vartype enable_staging: any + :ivar staging_settings: Specifies interim staging settings when EnableStaging is true. + :vartype staging_settings: ~azure.mgmt.datafactory.models.StagingSettings + :ivar parallel_copies: Maximum number of concurrent sessions opened on the source or sink to avoid overloading the data store. Type: integer (or Expression with resultType integer), minimum: 0. - :type parallel_copies: any - :param data_integration_units: Maximum number of data integration units that can be used to + :vartype parallel_copies: any + :ivar data_integration_units: Maximum number of data integration units that can be used to perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. 
- :type data_integration_units: any - :param enable_skip_incompatible_row: Whether to skip incompatible row. Default value is false. + :vartype data_integration_units: any + :ivar enable_skip_incompatible_row: Whether to skip incompatible row. Default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_skip_incompatible_row: any - :param redirect_incompatible_row_settings: Redirect incompatible row settings when + :vartype enable_skip_incompatible_row: any + :ivar redirect_incompatible_row_settings: Redirect incompatible row settings when EnableSkipIncompatibleRow is true. - :type redirect_incompatible_row_settings: + :vartype redirect_incompatible_row_settings: ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings - :param log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings customer + :ivar log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings customer need to provide when enabling session log. - :type log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings - :param log_settings: Log settings customer needs provide when enabling log. - :type log_settings: ~azure.mgmt.datafactory.models.LogSettings - :param preserve_rules: Preserve Rules. - :type preserve_rules: list[any] - :param preserve: Preserve rules. - :type preserve: list[any] - :param validate_data_consistency: Whether to enable Data Consistency validation. Type: boolean + :vartype log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings + :ivar log_settings: Log settings customer needs provide when enabling log. + :vartype log_settings: ~azure.mgmt.datafactory.models.LogSettings + :ivar preserve_rules: Preserve Rules. + :vartype preserve_rules: list[any] + :ivar preserve: Preserve rules. + :vartype preserve: list[any] + :ivar validate_data_consistency: Whether to enable Data Consistency validation. Type: boolean (or Expression with resultType boolean). 
- :type validate_data_consistency: any - :param skip_error_file: Specify the fault tolerance for data consistency. - :type skip_error_file: ~azure.mgmt.datafactory.models.SkipErrorFile + :vartype validate_data_consistency: any + :ivar skip_error_file: Specify the fault tolerance for data consistency. + :vartype skip_error_file: ~azure.mgmt.datafactory.models.SkipErrorFile """ _validation = { @@ -10932,6 +14681,66 @@ def __init__( skip_error_file: Optional["SkipErrorFile"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword inputs: List of inputs for the activity. + :paramtype inputs: list[~azure.mgmt.datafactory.models.DatasetReference] + :keyword outputs: List of outputs for the activity. + :paramtype outputs: list[~azure.mgmt.datafactory.models.DatasetReference] + :keyword source: Required. Copy activity source. + :paramtype source: ~azure.mgmt.datafactory.models.CopySource + :keyword sink: Required. Copy activity sink. + :paramtype sink: ~azure.mgmt.datafactory.models.CopySink + :keyword translator: Copy activity translator. If not specified, tabular translator is used. 
+ :paramtype translator: any + :keyword enable_staging: Specifies whether to copy data via an interim staging. Default value + is false. Type: boolean (or Expression with resultType boolean). + :paramtype enable_staging: any + :keyword staging_settings: Specifies interim staging settings when EnableStaging is true. + :paramtype staging_settings: ~azure.mgmt.datafactory.models.StagingSettings + :keyword parallel_copies: Maximum number of concurrent sessions opened on the source or sink to + avoid overloading the data store. Type: integer (or Expression with resultType integer), + minimum: 0. + :paramtype parallel_copies: any + :keyword data_integration_units: Maximum number of data integration units that can be used to + perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. + :paramtype data_integration_units: any + :keyword enable_skip_incompatible_row: Whether to skip incompatible row. Default value is + false. Type: boolean (or Expression with resultType boolean). + :paramtype enable_skip_incompatible_row: any + :keyword redirect_incompatible_row_settings: Redirect incompatible row settings when + EnableSkipIncompatibleRow is true. + :paramtype redirect_incompatible_row_settings: + ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :keyword log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings + customer need to provide when enabling session log. + :paramtype log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings + :keyword log_settings: Log settings customer needs provide when enabling log. + :paramtype log_settings: ~azure.mgmt.datafactory.models.LogSettings + :keyword preserve_rules: Preserve Rules. + :paramtype preserve_rules: list[any] + :keyword preserve: Preserve rules. + :paramtype preserve: list[any] + :keyword validate_data_consistency: Whether to enable Data Consistency validation. Type: + boolean (or Expression with resultType boolean). 
+ :paramtype validate_data_consistency: any + :keyword skip_error_file: Specify the fault tolerance for data consistency. + :paramtype skip_error_file: ~azure.mgmt.datafactory.models.SkipErrorFile + """ super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'Copy' # type: str self.inputs = inputs @@ -10956,12 +14765,12 @@ def __init__( class CopyActivityLogSettings(msrest.serialization.Model): """Settings for copy activity log. - :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + :ivar log_level: Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). - :type log_level: any - :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or + :vartype log_level: any + :ivar enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). - :type enable_reliable_logging: any + :vartype enable_reliable_logging: any """ _attribute_map = { @@ -10976,6 +14785,14 @@ def __init__( enable_reliable_logging: Optional[Any] = None, **kwargs ): + """ + :keyword log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + Expression with resultType string). + :paramtype log_level: any + :keyword enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean + (or Expression with resultType boolean). + :paramtype enable_reliable_logging: any + """ super(CopyActivityLogSettings, self).__init__(**kwargs) self.log_level = log_level self.enable_reliable_logging = enable_reliable_logging @@ -10989,11 +14806,11 @@ class CopyTranslator(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy translator type.Constant filled by server. - :type type: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy translator type.Constant filled by server. + :vartype type: str """ _validation = { @@ -11015,6 +14832,11 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(CopyTranslator, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'CopyTranslator' # type: str @@ -11025,59 +14847,59 @@ class CosmosDbLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: The connection string. 
Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or - Expression with resultType string). - :type account_endpoint: any - :param database: The name of the database. Type: string (or Expression with resultType string). - :type database: any - :param account_key: The account key of the Azure CosmosDB account. Type: SecureString or + :vartype connection_string: any + :ivar account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or Expression + with resultType string). + :vartype account_endpoint: any + :ivar database: The name of the database. Type: string (or Expression with resultType string). + :vartype database: any + :ivar account_key: The account key of the Azure CosmosDB account. Type: SecureString or AzureKeyVaultSecretReference. - :type account_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). 
- :type service_principal_id: any - :param service_principal_credential_type: The service principal credential type to use in + :vartype account_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_principal_id: The client ID of the application in Azure Active Directory used for + Server-To-Server authentication. Type: string (or Expression with resultType string). + :vartype service_principal_id: any + :ivar service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or + :vartype service_principal_credential_type: str or ~azure.mgmt.datafactory.models.CosmosDbServicePrincipalCredentialType - :param service_principal_credential: The credential of the service principal object in Azure + :ivar service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + :vartype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). 
+ :vartype tenant: any + :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: any - :param connection_mode: The connection mode used to access CosmosDB account. Type: string (or + :vartype azure_cloud_type: any + :ivar connection_mode: The connection mode used to access CosmosDB account. Type: string (or Expression with resultType string). Possible values include: "Gateway", "Direct". - :type connection_mode: str or ~azure.mgmt.datafactory.models.CosmosDbConnectionMode - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_mode: str or ~azure.mgmt.datafactory.models.CosmosDbConnectionMode + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -11125,6 +14947,60 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. 
+ :paramtype annotations: list[any] + :keyword connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or + Expression with resultType string). + :paramtype account_endpoint: any + :keyword database: The name of the database. Type: string (or Expression with resultType + string). + :paramtype database: any + :keyword account_key: The account key of the Azure CosmosDB account. Type: SecureString or + AzureKeyVaultSecretReference. + :paramtype account_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipalKey", "ServicePrincipalCert". + :paramtype service_principal_credential_type: str or + ~azure.mgmt.datafactory.models.CosmosDbServicePrincipalCredentialType + :keyword service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :paramtype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). 
+ :paramtype tenant: any + :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. + Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is + the data factory regions’ cloud type. Type: string (or Expression with resultType string). + :paramtype azure_cloud_type: any + :keyword connection_mode: The connection mode used to access CosmosDB account. Type: string (or + Expression with resultType string). Possible values include: "Gateway", "Direct". + :paramtype connection_mode: str or ~azure.mgmt.datafactory.models.CosmosDbConnectionMode + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'CosmosDb' # type: str self.connection_string = connection_string @@ -11145,31 +15021,31 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. 
+ :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type: + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type: string (or Expression with resultType string). 
- :type collection: any + :vartype collection: any """ _validation = { @@ -11205,6 +15081,31 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword collection: Required. The collection name of the CosmosDB (MongoDB API) database. + Type: string (or Expression with resultType string). 
+ :paramtype collection: any + """ super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'CosmosDbMongoDbApiCollection' # type: str self.collection = collection @@ -11215,29 +15116,29 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param is_server_version_above32: Whether the CosmosDB (MongoDB API) server version is higher + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar is_server_version_above32: Whether the CosmosDB (MongoDB API) server version is higher than 3.2. The default value is false. Type: boolean (or Expression with resultType boolean). - :type is_server_version_above32: any - :param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string, + :vartype is_server_version_above32: any + :ivar connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param database: Required. The name of the CosmosDB (MongoDB API) database that you want to + :vartype connection_string: any + :ivar database: Required. The name of the CosmosDB (MongoDB API) database that you want to access. Type: string (or Expression with resultType string). - :type database: any + :vartype database: any """ _validation = { @@ -11271,6 +15172,29 @@ def __init__( is_server_version_above32: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. 
+ :paramtype annotations: list[any] + :keyword is_server_version_above32: Whether the CosmosDB (MongoDB API) server version is higher + than 3.2. The default value is false. Type: boolean (or Expression with resultType boolean). + :paramtype is_server_version_above32: any + :keyword connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: + string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword database: Required. The name of the CosmosDB (MongoDB API) database that you want to + access. Type: string (or Expression with resultType string). + :paramtype database: any + """ super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'CosmosDbMongoDbApi' # type: str self.is_server_version_above32 = is_server_version_above32 @@ -11283,33 +15207,33 @@ class CosmosDbMongoDbApiSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: Specifies whether the document with same key to be overwritten (upsert) + :vartype disable_metrics_collection: any + :ivar write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). 
- :type write_behavior: any + :vartype write_behavior: any """ _validation = { @@ -11341,6 +15265,33 @@ def __init__( write_behavior: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: Specifies whether the document with same key to be overwritten + (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or + Expression with resultType string). Type: string (or Expression with resultType string). 
+ :paramtype write_behavior: any + """ super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbMongoDbApiSink' # type: str self.write_behavior = write_behavior @@ -11351,40 +15302,40 @@ class CosmosDbMongoDbApiSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param filter: Specifies selection filter using query operators. To return all documents in a + :vartype disable_metrics_collection: any + :ivar filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). - :type filter: any - :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties - :param batch_size: Specifies the number of documents to return in each batch of the response + :vartype filter: any + :ivar cursor_methods: Cursor methods for Mongodb query. + :vartype cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :ivar batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). - :type batch_size: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype batch_size: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. 
Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -11420,6 +15371,40 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). + :paramtype filter: any + :keyword cursor_methods: Cursor methods for Mongodb query. + :paramtype cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :keyword batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB instance. In most cases, modifying the batch size will not affect the user or the + application. 
This property's main purpose is to avoid hit the limitation of response size. + Type: integer (or Expression with resultType integer). + :paramtype batch_size: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbMongoDbApiSource' # type: str self.filter = filter @@ -11434,31 +15419,31 @@ class CosmosDbSqlApiCollectionDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or Expression with resultType string). - :type collection_name: any + :vartype collection_name: any """ _validation = { @@ -11494,6 +15479,31 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or + Expression with resultType string). + :paramtype collection_name: any + """ super(CosmosDbSqlApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'CosmosDbSqlApiCollection' # type: str self.collection_name = collection_name @@ -11504,32 +15514,32 @@ class CosmosDbSqlApiSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or + :vartype disable_metrics_collection: any + :ivar write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. - :type write_behavior: any + :vartype write_behavior: any """ _validation = { @@ -11561,6 +15571,32 @@ def __init__( write_behavior: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or + Expression with resultType string). Allowed values: insert and upsert. 
+ :paramtype write_behavior: any + """ super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbSqlApiSink' # type: str self.write_behavior = write_behavior @@ -11571,37 +15607,37 @@ class CosmosDbSqlApiSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: SQL API query. Type: string (or Expression with resultType string). - :type query: any - :param page_size: Page size of the result. Type: integer (or Expression with resultType + :vartype disable_metrics_collection: any + :ivar query: SQL API query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar page_size: Page size of the result. Type: integer (or Expression with resultType integer). - :type page_size: any - :param preferred_regions: Preferred regions. Type: array of strings (or Expression with + :vartype page_size: any + :ivar preferred_regions: Preferred regions. Type: array of strings (or Expression with resultType array of strings). - :type preferred_regions: any - :param detect_datetime: Whether detect primitive values as datetime values. Type: boolean (or + :vartype preferred_regions: any + :ivar detect_datetime: Whether detect primitive values as datetime values. Type: boolean (or Expression with resultType boolean). - :type detect_datetime: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype detect_datetime: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -11637,6 +15673,37 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: SQL API query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword page_size: Page size of the result. Type: integer (or Expression with resultType + integer). + :paramtype page_size: any + :keyword preferred_regions: Preferred regions. Type: array of strings (or Expression with + resultType array of strings). + :paramtype preferred_regions: any + :keyword detect_datetime: Whether detect primitive values as datetime values. Type: boolean (or + Expression with resultType boolean). + :paramtype detect_datetime: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :paramtype additional_columns: any + """ super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbSqlApiSource' # type: str self.query = query @@ -11651,28 +15718,28 @@ class CouchbaseLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: An ODBC connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param cred_string: The Azure key vault secret reference of credString in connection string. - :type cred_string: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar cred_string: The Azure key vault secret reference of credString in connection string. + :vartype cred_string: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -11704,6 +15771,28 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: An ODBC connection string. 
Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword cred_string: The Azure key vault secret reference of credString in connection string. + :paramtype cred_string: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Couchbase' # type: str self.connection_string = connection_string @@ -11716,32 +15805,32 @@ class CouchbaseSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -11773,6 +15862,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. 
Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'CouchbaseSource' # type: str self.query = query @@ -11783,30 +15898,30 @@ class CouchbaseTableDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -11841,6 +15956,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'CouchbaseTable' # type: str self.table_name = table_name @@ -11849,16 +15988,16 @@ def __init__( class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): """Request body structure for creating data flow debug session. - :param compute_type: Compute type of the cluster. The value will be overwritten by the same + :ivar compute_type: Compute type of the cluster. The value will be overwritten by the same setting in integration runtime if provided. - :type compute_type: str - :param core_count: Core count of the cluster. The value will be overwritten by the same setting + :vartype compute_type: str + :ivar core_count: Core count of the cluster. The value will be overwritten by the same setting in integration runtime if provided. - :type core_count: int - :param time_to_live: Time to live setting of the cluster in minutes. - :type time_to_live: int - :param integration_runtime: Set to use integration runtime setting for data flow debug session. - :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeDebugResource + :vartype core_count: int + :ivar time_to_live: Time to live setting of the cluster in minutes. + :vartype time_to_live: int + :ivar integration_runtime: Set to use integration runtime setting for data flow debug session. 
+ :vartype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeDebugResource """ _attribute_map = { @@ -11877,6 +16016,19 @@ def __init__( integration_runtime: Optional["IntegrationRuntimeDebugResource"] = None, **kwargs ): + """ + :keyword compute_type: Compute type of the cluster. The value will be overwritten by the same + setting in integration runtime if provided. + :paramtype compute_type: str + :keyword core_count: Core count of the cluster. The value will be overwritten by the same + setting in integration runtime if provided. + :paramtype core_count: int + :keyword time_to_live: Time to live setting of the cluster in minutes. + :paramtype time_to_live: int + :keyword integration_runtime: Set to use integration runtime setting for data flow debug + session. + :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeDebugResource + """ super(CreateDataFlowDebugSessionRequest, self).__init__(**kwargs) self.compute_type = compute_type self.core_count = core_count @@ -11887,10 +16039,10 @@ def __init__( class CreateDataFlowDebugSessionResponse(msrest.serialization.Model): """Response body structure for creating data flow debug session. - :param status: The state of the debug session. - :type status: str - :param session_id: The ID of data flow debug session. - :type session_id: str + :ivar status: The state of the debug session. + :vartype status: str + :ivar session_id: The ID of data flow debug session. + :vartype session_id: str """ _attribute_map = { @@ -11905,6 +16057,12 @@ def __init__( session_id: Optional[str] = None, **kwargs ): + """ + :keyword status: The state of the debug session. + :paramtype status: str + :keyword session_id: The ID of data flow debug session. 
+ :paramtype session_id: str + """ super(CreateDataFlowDebugSessionResponse, self).__init__(**kwargs) self.status = status self.session_id = session_id @@ -11913,17 +16071,17 @@ def __init__( class CreateLinkedIntegrationRuntimeRequest(msrest.serialization.Model): """The linked integration runtime information. - :param name: The name of the linked integration runtime. - :type name: str - :param subscription_id: The ID of the subscription that the linked integration runtime belongs + :ivar name: The name of the linked integration runtime. + :vartype name: str + :ivar subscription_id: The ID of the subscription that the linked integration runtime belongs to. - :type subscription_id: str - :param data_factory_name: The name of the data factory that the linked integration runtime + :vartype subscription_id: str + :ivar data_factory_name: The name of the data factory that the linked integration runtime belongs to. - :type data_factory_name: str - :param data_factory_location: The location of the data factory that the linked integration + :vartype data_factory_name: str + :ivar data_factory_location: The location of the data factory that the linked integration runtime belongs to. - :type data_factory_location: str + :vartype data_factory_location: str """ _attribute_map = { @@ -11942,6 +16100,19 @@ def __init__( data_factory_location: Optional[str] = None, **kwargs ): + """ + :keyword name: The name of the linked integration runtime. + :paramtype name: str + :keyword subscription_id: The ID of the subscription that the linked integration runtime + belongs to. + :paramtype subscription_id: str + :keyword data_factory_name: The name of the data factory that the linked integration runtime + belongs to. + :paramtype data_factory_name: str + :keyword data_factory_location: The location of the data factory that the linked integration + runtime belongs to. 
+ :paramtype data_factory_location: str + """ super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs) self.name = name self.subscription_id = subscription_id @@ -11954,8 +16125,8 @@ class CreateRunResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param run_id: Required. Identifier of a run. - :type run_id: str + :ivar run_id: Required. Identifier of a run. + :vartype run_id: str """ _validation = { @@ -11972,6 +16143,10 @@ def __init__( run_id: str, **kwargs ): + """ + :keyword run_id: Required. Identifier of a run. + :paramtype run_id: str + """ super(CreateRunResponse, self).__init__(**kwargs) self.run_id = run_id @@ -11984,15 +16159,15 @@ class Credential(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of credential.Constant filled by server. - :type type: str - :param description: Credential description. - :type description: str - :param annotations: List of tags that can be used for describing the Credential. - :type annotations: list[any] + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of credential.Constant filled by server. + :vartype type: str + :ivar description: Credential description. + :vartype description: str + :ivar annotations: List of tags that can be used for describing the Credential. + :vartype annotations: list[any] """ _validation = { @@ -12018,6 +16193,15 @@ def __init__( annotations: Optional[List[Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword description: Credential description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the Credential. + :paramtype annotations: list[any] + """ super(Credential, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'Credential' # type: str @@ -12032,13 +16216,13 @@ class CredentialReference(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar type: Credential reference type. Has constant value: "CredentialReference". :vartype type: str - :param reference_name: Required. Reference credential name. - :type reference_name: str + :ivar reference_name: Required. Reference credential name. + :vartype reference_name: str """ _validation = { @@ -12061,6 +16245,13 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword reference_name: Required. Reference credential name. + :paramtype reference_name: str + """ super(CredentialReference, self).__init__(**kwargs) self.additional_properties = additional_properties self.reference_name = reference_name @@ -12099,6 +16290,8 @@ def __init__( self, **kwargs ): + """ + """ super(SubResource, self).__init__(**kwargs) self.id = None self.name = None @@ -12121,8 +16314,8 @@ class CredentialResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. 
Properties of credentials. - :type properties: ~azure.mgmt.datafactory.models.Credential + :ivar properties: Required. Properties of credentials. + :vartype properties: ~azure.mgmt.datafactory.models.Credential """ _validation = { @@ -12147,6 +16340,10 @@ def __init__( properties: "Credential", **kwargs ): + """ + :keyword properties: Required. Properties of credentials. + :paramtype properties: ~azure.mgmt.datafactory.models.Credential + """ super(CredentialResource, self).__init__(**kwargs) self.properties = properties @@ -12156,43 +16353,43 @@ class CustomActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param command: Required. Command for custom activity Type: string (or Expression with + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. 
+ :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar command: Required. Command for custom activity Type: string (or Expression with resultType string). - :type command: any - :param resource_linked_service: Resource linked service reference. - :type resource_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param folder_path: Folder path for resource files Type: string (or Expression with resultType + :vartype command: any + :ivar resource_linked_service: Resource linked service reference. + :vartype resource_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar folder_path: Folder path for resource files Type: string (or Expression with resultType string). - :type folder_path: any - :param reference_objects: Reference objects. - :type reference_objects: ~azure.mgmt.datafactory.models.CustomActivityReferenceObject - :param extended_properties: User defined property bag. There is no restriction on the keys or + :vartype folder_path: any + :ivar reference_objects: Reference objects. + :vartype reference_objects: ~azure.mgmt.datafactory.models.CustomActivityReferenceObject + :ivar extended_properties: User defined property bag. There is no restriction on the keys or values that can be used. The user specified custom activity has the full responsibility to consume and interpret the content defined. - :type extended_properties: dict[str, any] - :param retention_time_in_days: The retention time for the files submitted for custom activity. 
+ :vartype extended_properties: dict[str, any] + :ivar retention_time_in_days: The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). - :type retention_time_in_days: any - :param auto_user_specification: Elevation level and scope for the user, default is nonadmin + :vartype retention_time_in_days: any + :ivar auto_user_specification: Elevation level and scope for the user, default is nonadmin task. Type: string (or Expression with resultType double). - :type auto_user_specification: any + :vartype auto_user_specification: any """ _validation = { @@ -12238,6 +16435,43 @@ def __init__( auto_user_specification: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword command: Required. Command for custom activity Type: string (or Expression with + resultType string). + :paramtype command: any + :keyword resource_linked_service: Resource linked service reference. + :paramtype resource_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword folder_path: Folder path for resource files Type: string (or Expression with + resultType string). 
+ :paramtype folder_path: any + :keyword reference_objects: Reference objects. + :paramtype reference_objects: ~azure.mgmt.datafactory.models.CustomActivityReferenceObject + :keyword extended_properties: User defined property bag. There is no restriction on the keys or + values that can be used. The user specified custom activity has the full responsibility to + consume and interpret the content defined. + :paramtype extended_properties: dict[str, any] + :keyword retention_time_in_days: The retention time for the files submitted for custom + activity. Type: double (or Expression with resultType double). + :paramtype retention_time_in_days: any + :keyword auto_user_specification: Elevation level and scope for the user, default is nonadmin + task. Type: string (or Expression with resultType double). + :paramtype auto_user_specification: any + """ super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'Custom' # type: str self.command = command @@ -12252,10 +16486,10 @@ def __init__( class CustomActivityReferenceObject(msrest.serialization.Model): """Reference objects for custom activity. - :param linked_services: Linked service references. - :type linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param datasets: Dataset references. - :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + :ivar linked_services: Linked service references. + :vartype linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :ivar datasets: Dataset references. + :vartype datasets: list[~azure.mgmt.datafactory.models.DatasetReference] """ _attribute_map = { @@ -12270,6 +16504,12 @@ def __init__( datasets: Optional[List["DatasetReference"]] = None, **kwargs ): + """ + :keyword linked_services: Linked service references. 
+ :paramtype linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :keyword datasets: Dataset references. + :paramtype datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + """ super(CustomActivityReferenceObject, self).__init__(**kwargs) self.linked_services = linked_services self.datasets = datasets @@ -12280,30 +16520,30 @@ class CustomDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type_properties: Custom dataset properties. - :type type_properties: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar type_properties: Custom dataset properties. + :vartype type_properties: any """ _validation = { @@ -12338,6 +16578,30 @@ def __init__( type_properties: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword type_properties: Custom dataset properties. + :paramtype type_properties: any + """ super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'CustomDataset' # type: str self.type_properties = type_properties @@ -12348,21 +16612,21 @@ class CustomDataSourceLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param type_properties: Required. Custom linked service properties. 
- :type type_properties: any + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar type_properties: Required. Custom linked service properties. + :vartype type_properties: any """ _validation = { @@ -12391,6 +16655,21 @@ def __init__( annotations: Optional[List[Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword type_properties: Required. Custom linked service properties. 
+ :paramtype type_properties: any + """ super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'CustomDataSource' # type: str self.type_properties = type_properties @@ -12403,30 +16682,30 @@ class CustomEventsTrigger(MultiplePipelineTrigger): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Trigger type.Constant filled by server. + :vartype type: str + :ivar description: Trigger description. + :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[any] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param subject_begins_with: The event subject must begin with the pattern provided for trigger + :ivar annotations: List of tags that can be used for describing the trigger. + :vartype annotations: list[any] + :ivar pipelines: Pipelines that need to be started. 
+ :vartype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :ivar subject_begins_with: The event subject must begin with the pattern provided for trigger to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. - :type subject_begins_with: str - :param subject_ends_with: The event subject must end with the pattern provided for trigger to + :vartype subject_begins_with: str + :ivar subject_ends_with: The event subject must end with the pattern provided for trigger to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. - :type subject_ends_with: str - :param events: Required. The list of event types that cause this trigger to fire. - :type events: list[any] - :param scope: Required. The ARM resource ID of the Azure Event Grid Topic. - :type scope: str + :vartype subject_ends_with: str + :ivar events: Required. The list of event types that cause this trigger to fire. + :vartype events: list[any] + :ivar scope: Required. The ARM resource ID of the Azure Event Grid Topic. + :vartype scope: str """ _validation = { @@ -12462,6 +16741,27 @@ def __init__( subject_ends_with: Optional[str] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Trigger description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the trigger. + :paramtype annotations: list[any] + :keyword pipelines: Pipelines that need to be started. + :paramtype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :keyword subject_begins_with: The event subject must begin with the pattern provided for + trigger to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. 
+ :paramtype subject_begins_with: str + :keyword subject_ends_with: The event subject must end with the pattern provided for trigger to + fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. + :paramtype subject_ends_with: str + :keyword events: Required. The list of event types that cause this trigger to fire. + :paramtype events: list[any] + :keyword scope: Required. The ARM resource ID of the Azure Event Grid Topic. + :paramtype scope: str + """ super(CustomEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) self.type = 'CustomEventsTrigger' # type: str self.subject_begins_with = subject_begins_with @@ -12475,32 +16775,32 @@ class DatabricksNotebookActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param notebook_path: Required. The absolute path of the notebook to be run in the Databricks + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar notebook_path: Required. The absolute path of the notebook to be run in the Databricks Workspace. This path must begin with a slash. Type: string (or Expression with resultType string). - :type notebook_path: any - :param base_parameters: Base parameters to be used for each run of this job.If the notebook + :vartype notebook_path: any + :ivar base_parameters: Base parameters to be used for each run of this job.If the notebook takes a parameter that is not specified, the default value from the notebook will be used. - :type base_parameters: dict[str, any] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, any]] + :vartype base_parameters: dict[str, any] + :ivar libraries: A list of libraries to be installed on the cluster that will execute the job. + :vartype libraries: list[dict[str, any]] """ _validation = { @@ -12538,6 +16838,33 @@ def __init__( libraries: Optional[List[Dict[str, Any]]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. 
+ :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword notebook_path: Required. The absolute path of the notebook to be run in the Databricks + Workspace. This path must begin with a slash. Type: string (or Expression with resultType + string). + :paramtype notebook_path: any + :keyword base_parameters: Base parameters to be used for each run of this job.If the notebook + takes a parameter that is not specified, the default value from the notebook will be used. + :paramtype base_parameters: dict[str, any] + :keyword libraries: A list of libraries to be installed on the cluster that will execute the + job. + :paramtype libraries: list[dict[str, any]] + """ super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'DatabricksNotebook' # type: str self.notebook_path = notebook_path @@ -12550,31 +16877,31 @@ class DatabricksSparkJarActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. 
- :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param main_class_name: Required. The full name of the class containing the main method to be + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar main_class_name: Required. The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library. Type: string (or Expression with resultType string). - :type main_class_name: any - :param parameters: Parameters that will be passed to the main method. - :type parameters: list[any] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. 
- :type libraries: list[dict[str, any]] + :vartype main_class_name: any + :ivar parameters: Parameters that will be passed to the main method. + :vartype parameters: list[any] + :ivar libraries: A list of libraries to be installed on the cluster that will execute the job. + :vartype libraries: list[dict[str, any]] """ _validation = { @@ -12612,6 +16939,32 @@ def __init__( libraries: Optional[List[Dict[str, Any]]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword main_class_name: Required. The full name of the class containing the main method to be + executed. This class must be contained in a JAR provided as a library. Type: string (or + Expression with resultType string). + :paramtype main_class_name: any + :keyword parameters: Parameters that will be passed to the main method. + :paramtype parameters: list[any] + :keyword libraries: A list of libraries to be installed on the cluster that will execute the + job. 
+ :paramtype libraries: list[dict[str, any]] + """ super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'DatabricksSparkJar' # type: str self.main_class_name = main_class_name @@ -12624,30 +16977,30 @@ class DatabricksSparkPythonActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param python_file: Required. The URI of the Python file to be executed. DBFS paths are + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. 
+ :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar python_file: Required. The URI of the Python file to be executed. DBFS paths are supported. Type: string (or Expression with resultType string). - :type python_file: any - :param parameters: Command line parameters that will be passed to the Python file. - :type parameters: list[any] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, any]] + :vartype python_file: any + :ivar parameters: Command line parameters that will be passed to the Python file. + :vartype parameters: list[any] + :ivar libraries: A list of libraries to be installed on the cluster that will execute the job. + :vartype libraries: list[dict[str, any]] """ _validation = { @@ -12685,6 +17038,31 @@ def __init__( libraries: Optional[List[Dict[str, Any]]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword python_file: Required. The URI of the Python file to be executed. DBFS paths are + supported. Type: string (or Expression with resultType string). + :paramtype python_file: any + :keyword parameters: Command line parameters that will be passed to the Python file. + :paramtype parameters: list[any] + :keyword libraries: A list of libraries to be installed on the cluster that will execute the + job. + :paramtype libraries: list[dict[str, any]] + """ super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'DatabricksSparkPython' # type: str self.python_file = python_file @@ -12700,15 +17078,15 @@ class DataFlow(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of data flow.Constant filled by server. - :type type: str - :param description: The description of the data flow. - :type description: str - :param annotations: List of tags that can be used for describing the data flow. - :type annotations: list[any] - :param folder: The folder that this data flow is in. If not specified, Data flow will appear at + :ivar type: Required. Type of data flow.Constant filled by server. + :vartype type: str + :ivar description: The description of the data flow. + :vartype description: str + :ivar annotations: List of tags that can be used for describing the data flow. + :vartype annotations: list[any] + :ivar folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :vartype folder: ~azure.mgmt.datafactory.models.DataFlowFolder """ _validation = { @@ -12734,6 +17112,15 @@ def __init__( folder: Optional["DataFlowFolder"] = None, **kwargs ): + """ + :keyword description: The description of the data flow. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the data flow. + :paramtype annotations: list[any] + :keyword folder: The folder that this data flow is in. If not specified, Data flow will appear + at the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DataFlowFolder + """ super(DataFlow, self).__init__(**kwargs) self.type = None # type: Optional[str] self.description = description @@ -12746,14 +17133,14 @@ class DataFlowDebugCommandPayload(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param stream_name: Required. The stream name which is used for preview. - :type stream_name: str - :param row_limits: Row limits for preview response. - :type row_limits: int - :param columns: Array of column names. - :type columns: list[str] - :param expression: The expression which is used for preview. - :type expression: str + :ivar stream_name: Required. The stream name which is used for preview. + :vartype stream_name: str + :ivar row_limits: Row limits for preview response. + :vartype row_limits: int + :ivar columns: Array of column names. + :vartype columns: list[str] + :ivar expression: The expression which is used for preview. + :vartype expression: str """ _validation = { @@ -12776,6 +17163,16 @@ def __init__( expression: Optional[str] = None, **kwargs ): + """ + :keyword stream_name: Required. The stream name which is used for preview. + :paramtype stream_name: str + :keyword row_limits: Row limits for preview response. + :paramtype row_limits: int + :keyword columns: Array of column names. 
+ :paramtype columns: list[str] + :keyword expression: The expression which is used for preview. + :paramtype expression: str + """ super(DataFlowDebugCommandPayload, self).__init__(**kwargs) self.stream_name = stream_name self.row_limits = row_limits @@ -12786,13 +17183,13 @@ def __init__( class DataFlowDebugCommandRequest(msrest.serialization.Model): """Request body structure for data flow debug command. - :param session_id: The ID of data flow debug session. - :type session_id: str - :param command: The command type. Possible values include: "executePreviewQuery", + :ivar session_id: The ID of data flow debug session. + :vartype session_id: str + :ivar command: The command type. Possible values include: "executePreviewQuery", "executeStatisticsQuery", "executeExpressionQuery". - :type command: str or ~azure.mgmt.datafactory.models.DataFlowDebugCommandType - :param command_payload: The command payload object. - :type command_payload: ~azure.mgmt.datafactory.models.DataFlowDebugCommandPayload + :vartype command: str or ~azure.mgmt.datafactory.models.DataFlowDebugCommandType + :ivar command_payload: The command payload object. + :vartype command_payload: ~azure.mgmt.datafactory.models.DataFlowDebugCommandPayload """ _attribute_map = { @@ -12809,6 +17206,15 @@ def __init__( command_payload: Optional["DataFlowDebugCommandPayload"] = None, **kwargs ): + """ + :keyword session_id: The ID of data flow debug session. + :paramtype session_id: str + :keyword command: The command type. Possible values include: "executePreviewQuery", + "executeStatisticsQuery", "executeExpressionQuery". + :paramtype command: str or ~azure.mgmt.datafactory.models.DataFlowDebugCommandType + :keyword command_payload: The command payload object. 
+ :paramtype command_payload: ~azure.mgmt.datafactory.models.DataFlowDebugCommandPayload + """ super(DataFlowDebugCommandRequest, self).__init__(**kwargs) self.session_id = session_id self.command = command @@ -12818,10 +17224,10 @@ def __init__( class DataFlowDebugCommandResponse(msrest.serialization.Model): """Response body structure of data flow result for data preview, statistics or expression preview. - :param status: The run status of data preview, statistics or expression preview. - :type status: str - :param data: The result data of data preview, statistics or expression preview. - :type data: str + :ivar status: The run status of data preview, statistics or expression preview. + :vartype status: str + :ivar data: The result data of data preview, statistics or expression preview. + :vartype data: str """ _attribute_map = { @@ -12836,6 +17242,12 @@ def __init__( data: Optional[str] = None, **kwargs ): + """ + :keyword status: The run status of data preview, statistics or expression preview. + :paramtype status: str + :keyword data: The result data of data preview, statistics or expression preview. + :paramtype data: str + """ super(DataFlowDebugCommandResponse, self).__init__(**kwargs) self.status = status self.data = data @@ -12844,23 +17256,23 @@ def __init__( class DataFlowDebugPackage(msrest.serialization.Model): """Request body structure for starting data flow debug session. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow: Data flow instance. - :type data_flow: ~azure.mgmt.datafactory.models.DataFlowDebugResource - :param data_flows: List of Data flows. 
- :type data_flows: list[~azure.mgmt.datafactory.models.DataFlowDebugResource] - :param datasets: List of datasets. - :type datasets: list[~azure.mgmt.datafactory.models.DatasetDebugResource] - :param linked_services: List of linked services. - :type linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceDebugResource] - :param staging: Staging info for debug session. - :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo - :param debug_settings: Data flow debug settings. - :type debug_settings: ~azure.mgmt.datafactory.models.DataFlowDebugPackageDebugSettings + :vartype additional_properties: dict[str, any] + :ivar session_id: The ID of data flow debug session. + :vartype session_id: str + :ivar data_flow: Data flow instance. + :vartype data_flow: ~azure.mgmt.datafactory.models.DataFlowDebugResource + :ivar data_flows: List of Data flows. + :vartype data_flows: list[~azure.mgmt.datafactory.models.DataFlowDebugResource] + :ivar datasets: List of datasets. + :vartype datasets: list[~azure.mgmt.datafactory.models.DatasetDebugResource] + :ivar linked_services: List of linked services. + :vartype linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceDebugResource] + :ivar staging: Staging info for debug session. + :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :ivar debug_settings: Data flow debug settings. + :vartype debug_settings: ~azure.mgmt.datafactory.models.DataFlowDebugPackageDebugSettings """ _attribute_map = { @@ -12887,6 +17299,25 @@ def __init__( debug_settings: Optional["DataFlowDebugPackageDebugSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword session_id: The ID of data flow debug session. + :paramtype session_id: str + :keyword data_flow: Data flow instance. 
+ :paramtype data_flow: ~azure.mgmt.datafactory.models.DataFlowDebugResource + :keyword data_flows: List of Data flows. + :paramtype data_flows: list[~azure.mgmt.datafactory.models.DataFlowDebugResource] + :keyword datasets: List of datasets. + :paramtype datasets: list[~azure.mgmt.datafactory.models.DatasetDebugResource] + :keyword linked_services: List of linked services. + :paramtype linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceDebugResource] + :keyword staging: Staging info for debug session. + :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :keyword debug_settings: Data flow debug settings. + :paramtype debug_settings: ~azure.mgmt.datafactory.models.DataFlowDebugPackageDebugSettings + """ super(DataFlowDebugPackage, self).__init__(**kwargs) self.additional_properties = additional_properties self.session_id = session_id @@ -12901,12 +17332,12 @@ def __init__( class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): """Data flow debug settings. - :param source_settings: Source setting for data flow debug. - :type source_settings: list[~azure.mgmt.datafactory.models.DataFlowSourceSetting] - :param parameters: Data flow parameters. - :type parameters: dict[str, any] - :param dataset_parameters: Parameters for dataset. - :type dataset_parameters: any + :ivar source_settings: Source setting for data flow debug. + :vartype source_settings: list[~azure.mgmt.datafactory.models.DataFlowSourceSetting] + :ivar parameters: Data flow parameters. + :vartype parameters: dict[str, any] + :ivar dataset_parameters: Parameters for dataset. + :vartype dataset_parameters: any """ _attribute_map = { @@ -12923,6 +17354,14 @@ def __init__( dataset_parameters: Optional[Any] = None, **kwargs ): + """ + :keyword source_settings: Source setting for data flow debug. + :paramtype source_settings: list[~azure.mgmt.datafactory.models.DataFlowSourceSetting] + :keyword parameters: Data flow parameters. 
+ :paramtype parameters: dict[str, any] + :keyword dataset_parameters: Parameters for dataset. + :paramtype dataset_parameters: any + """ super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) self.source_settings = source_settings self.parameters = parameters @@ -12932,8 +17371,8 @@ def __init__( class SubResourceDebugResource(msrest.serialization.Model): """Azure Data Factory nested debug resource. - :param name: The resource name. - :type name: str + :ivar name: The resource name. + :vartype name: str """ _attribute_map = { @@ -12946,6 +17385,10 @@ def __init__( name: Optional[str] = None, **kwargs ): + """ + :keyword name: The resource name. + :paramtype name: str + """ super(SubResourceDebugResource, self).__init__(**kwargs) self.name = name @@ -12955,10 +17398,10 @@ class DataFlowDebugResource(SubResourceDebugResource): All required parameters must be populated in order to send to Azure. - :param name: The resource name. - :type name: str - :param properties: Required. Data flow properties. - :type properties: ~azure.mgmt.datafactory.models.DataFlow + :ivar name: The resource name. + :vartype name: str + :ivar properties: Required. Data flow properties. + :vartype properties: ~azure.mgmt.datafactory.models.DataFlow """ _validation = { @@ -12977,6 +17420,12 @@ def __init__( name: Optional[str] = None, **kwargs ): + """ + :keyword name: The resource name. + :paramtype name: str + :keyword properties: Required. Data flow properties. + :paramtype properties: ~azure.mgmt.datafactory.models.DataFlow + """ super(DataFlowDebugResource, self).__init__(name=name, **kwargs) self.properties = properties @@ -12984,27 +17433,27 @@ def __init__( class DataFlowDebugSessionInfo(msrest.serialization.Model): """Data flow debug session info. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param data_flow_name: The name of the data flow. - :type data_flow_name: str - :param compute_type: Compute type of the cluster. - :type compute_type: str - :param core_count: Core count of the cluster. - :type core_count: int - :param node_count: Node count of the cluster. (deprecated property). - :type node_count: int - :param integration_runtime_name: Attached integration runtime name of data flow debug session. - :type integration_runtime_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :param start_time: Start time of data flow debug session. - :type start_time: str - :param time_to_live_in_minutes: Compute type of the cluster. - :type time_to_live_in_minutes: int - :param last_activity_time: Last activity time of data flow debug session. - :type last_activity_time: str + :vartype additional_properties: dict[str, any] + :ivar data_flow_name: The name of the data flow. + :vartype data_flow_name: str + :ivar compute_type: Compute type of the cluster. + :vartype compute_type: str + :ivar core_count: Core count of the cluster. + :vartype core_count: int + :ivar node_count: Node count of the cluster. (deprecated property). + :vartype node_count: int + :ivar integration_runtime_name: Attached integration runtime name of data flow debug session. + :vartype integration_runtime_name: str + :ivar session_id: The ID of data flow debug session. + :vartype session_id: str + :ivar start_time: Start time of data flow debug session. + :vartype start_time: str + :ivar time_to_live_in_minutes: Compute type of the cluster. + :vartype time_to_live_in_minutes: int + :ivar last_activity_time: Last activity time of data flow debug session. 
+ :vartype last_activity_time: str """ _attribute_map = { @@ -13035,6 +17484,30 @@ def __init__( last_activity_time: Optional[str] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword data_flow_name: The name of the data flow. + :paramtype data_flow_name: str + :keyword compute_type: Compute type of the cluster. + :paramtype compute_type: str + :keyword core_count: Core count of the cluster. + :paramtype core_count: int + :keyword node_count: Node count of the cluster. (deprecated property). + :paramtype node_count: int + :keyword integration_runtime_name: Attached integration runtime name of data flow debug + session. + :paramtype integration_runtime_name: str + :keyword session_id: The ID of data flow debug session. + :paramtype session_id: str + :keyword start_time: Start time of data flow debug session. + :paramtype start_time: str + :keyword time_to_live_in_minutes: Compute type of the cluster. + :paramtype time_to_live_in_minutes: int + :keyword last_activity_time: Last activity time of data flow debug session. + :paramtype last_activity_time: str + """ super(DataFlowDebugSessionInfo, self).__init__(**kwargs) self.additional_properties = additional_properties self.data_flow_name = data_flow_name @@ -13051,8 +17524,8 @@ def __init__( class DataFlowFolder(msrest.serialization.Model): """The folder that this data flow is in. If not specified, Data flow will appear at the root level. - :param name: The name of the folder that this data flow is in. - :type name: str + :ivar name: The name of the folder that this data flow is in. + :vartype name: str """ _attribute_map = { @@ -13065,6 +17538,10 @@ def __init__( name: Optional[str] = None, **kwargs ): + """ + :keyword name: The name of the folder that this data flow is in. 
+ :paramtype name: str + """ super(DataFlowFolder, self).__init__(**kwargs) self.name = name @@ -13074,10 +17551,10 @@ class DataFlowListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of data flows. - :type value: list[~azure.mgmt.datafactory.models.DataFlowResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of data flows. + :vartype value: list[~azure.mgmt.datafactory.models.DataFlowResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -13096,6 +17573,12 @@ def __init__( next_link: Optional[str] = None, **kwargs ): + """ + :keyword value: Required. List of data flows. + :paramtype value: list[~azure.mgmt.datafactory.models.DataFlowResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(DataFlowListResponse, self).__init__(**kwargs) self.value = value self.next_link = next_link @@ -13108,17 +17591,17 @@ class DataFlowReference(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar type: Data flow reference type. Has constant value: "DataFlowReference". :vartype type: str - :param reference_name: Required. Reference data flow name. - :type reference_name: str - :param dataset_parameters: Reference data flow parameters from dataset. - :type dataset_parameters: any - :param parameters: Data flow parameters. 
- :type parameters: dict[str, any] + :ivar reference_name: Required. Reference data flow name. + :vartype reference_name: str + :ivar dataset_parameters: Reference data flow parameters from dataset. + :vartype dataset_parameters: any + :ivar parameters: Data flow parameters. + :vartype parameters: dict[str, any] """ _validation = { @@ -13145,6 +17628,17 @@ def __init__( parameters: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword reference_name: Required. Reference data flow name. + :paramtype reference_name: str + :keyword dataset_parameters: Reference data flow parameters from dataset. + :paramtype dataset_parameters: any + :keyword parameters: Data flow parameters. + :paramtype parameters: dict[str, any] + """ super(DataFlowReference, self).__init__(**kwargs) self.additional_properties = additional_properties self.reference_name = reference_name @@ -13167,8 +17661,8 @@ class DataFlowResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Data flow properties. - :type properties: ~azure.mgmt.datafactory.models.DataFlow + :ivar properties: Required. Data flow properties. + :vartype properties: ~azure.mgmt.datafactory.models.DataFlow """ _validation = { @@ -13193,6 +17687,10 @@ def __init__( properties: "DataFlow", **kwargs ): + """ + :keyword properties: Required. Data flow properties. + :paramtype properties: ~azure.mgmt.datafactory.models.DataFlow + """ super(DataFlowResource, self).__init__(**kwargs) self.properties = properties @@ -13202,16 +17700,16 @@ class Transformation(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. 
- :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param linked_service: Linked service reference. - :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param flowlet: Flowlet Reference. - :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + :ivar name: Required. Transformation name. + :vartype name: str + :ivar description: Transformation description. + :vartype description: str + :ivar dataset: Dataset reference. + :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :ivar linked_service: Linked service reference. + :vartype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar flowlet: Flowlet Reference. + :vartype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference """ _validation = { @@ -13236,6 +17734,18 @@ def __init__( flowlet: Optional["DataFlowReference"] = None, **kwargs ): + """ + :keyword name: Required. Transformation name. + :paramtype name: str + :keyword description: Transformation description. + :paramtype description: str + :keyword dataset: Dataset reference. + :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :keyword linked_service: Linked service reference. + :paramtype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword flowlet: Flowlet Reference. + :paramtype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + """ super(Transformation, self).__init__(**kwargs) self.name = name self.description = description @@ -13249,18 +17759,18 @@ class DataFlowSink(Transformation): All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param linked_service: Linked service reference. 
- :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param flowlet: Flowlet Reference. - :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference - :param schema_linked_service: Schema linked service reference. - :type schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar name: Required. Transformation name. + :vartype name: str + :ivar description: Transformation description. + :vartype description: str + :ivar dataset: Dataset reference. + :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :ivar linked_service: Linked service reference. + :vartype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar flowlet: Flowlet Reference. + :vartype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + :ivar schema_linked_service: Schema linked service reference. + :vartype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { @@ -13287,6 +17797,20 @@ def __init__( schema_linked_service: Optional["LinkedServiceReference"] = None, **kwargs ): + """ + :keyword name: Required. Transformation name. + :paramtype name: str + :keyword description: Transformation description. + :paramtype description: str + :keyword dataset: Dataset reference. + :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :keyword linked_service: Linked service reference. + :paramtype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword flowlet: Flowlet Reference. + :paramtype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + :keyword schema_linked_service: Schema linked service reference. 
+ :paramtype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + """ super(DataFlowSink, self).__init__(name=name, description=description, dataset=dataset, linked_service=linked_service, flowlet=flowlet, **kwargs) self.schema_linked_service = schema_linked_service @@ -13296,18 +17820,18 @@ class DataFlowSource(Transformation): All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param linked_service: Linked service reference. - :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param flowlet: Flowlet Reference. - :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference - :param schema_linked_service: Schema linked service reference. - :type schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar name: Required. Transformation name. + :vartype name: str + :ivar description: Transformation description. + :vartype description: str + :ivar dataset: Dataset reference. + :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :ivar linked_service: Linked service reference. + :vartype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar flowlet: Flowlet Reference. + :vartype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + :ivar schema_linked_service: Schema linked service reference. + :vartype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { @@ -13334,6 +17858,20 @@ def __init__( schema_linked_service: Optional["LinkedServiceReference"] = None, **kwargs ): + """ + :keyword name: Required. Transformation name. + :paramtype name: str + :keyword description: Transformation description. 
+ :paramtype description: str + :keyword dataset: Dataset reference. + :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :keyword linked_service: Linked service reference. + :paramtype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword flowlet: Flowlet Reference. + :paramtype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + :keyword schema_linked_service: Schema linked service reference. + :paramtype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + """ super(DataFlowSource, self).__init__(name=name, description=description, dataset=dataset, linked_service=linked_service, flowlet=flowlet, **kwargs) self.schema_linked_service = schema_linked_service @@ -13341,13 +17879,13 @@ def __init__( class DataFlowSourceSetting(msrest.serialization.Model): """Definition of data flow source setting for debug. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param source_name: The data flow source name. - :type source_name: str - :param row_limit: Defines the row limit of data flow source in debug. - :type row_limit: int + :vartype additional_properties: dict[str, any] + :ivar source_name: The data flow source name. + :vartype source_name: str + :ivar row_limit: Defines the row limit of data flow source in debug. + :vartype row_limit: int """ _attribute_map = { @@ -13364,6 +17902,15 @@ def __init__( row_limit: Optional[int] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_name: The data flow source name. + :paramtype source_name: str + :keyword row_limit: Defines the row limit of data flow source in debug. 
+ :paramtype row_limit: int + """ super(DataFlowSourceSetting, self).__init__(**kwargs) self.additional_properties = additional_properties self.source_name = source_name @@ -13373,11 +17920,11 @@ def __init__( class DataFlowStagingInfo(msrest.serialization.Model): """Staging info for execute data flow activity. - :param linked_service: Staging linked service reference. - :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType + :ivar linked_service: Staging linked service reference. + :vartype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar folder_path: Folder path for staging blob. Type: string (or Expression with resultType string). - :type folder_path: any + :vartype folder_path: any """ _attribute_map = { @@ -13392,6 +17939,13 @@ def __init__( folder_path: Optional[Any] = None, **kwargs ): + """ + :keyword linked_service: Staging linked service reference. + :paramtype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword folder_path: Folder path for staging blob. Type: string (or Expression with resultType + string). + :paramtype folder_path: any + """ super(DataFlowStagingInfo, self).__init__(**kwargs) self.linked_service = linked_service self.folder_path = folder_path @@ -13402,43 +17956,43 @@ class DataLakeAnalyticsUSQLActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. 
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param script_path: Required. Case-sensitive path to folder that contains the U-SQL script. + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar script_path: Required. Case-sensitive path to folder that contains the U-SQL script. Type: string (or Expression with resultType string). - :type script_path: any - :param script_linked_service: Required. Script linked service reference. - :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. + :vartype script_path: any + :ivar script_linked_service: Required. Script linked service reference. 
+ :vartype script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. - :type degree_of_parallelism: any - :param priority: Determines which jobs out of all that are queued should be selected to run + :vartype degree_of_parallelism: any + :ivar priority: Determines which jobs out of all that are queued should be selected to run first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or Expression with resultType integer), minimum: 1. - :type priority: any - :param parameters: Parameters for U-SQL job request. - :type parameters: dict[str, any] - :param runtime_version: Runtime version of the U-SQL engine to use. Type: string (or Expression + :vartype priority: any + :ivar parameters: Parameters for U-SQL job request. + :vartype parameters: dict[str, any] + :ivar runtime_version: Runtime version of the U-SQL engine to use. Type: string (or Expression with resultType string). - :type runtime_version: any - :param compilation_mode: Compilation mode of U-SQL. Must be one of these values : Semantic, - Full and SingleBox. Type: string (or Expression with resultType string). - :type compilation_mode: any + :vartype runtime_version: any + :ivar compilation_mode: Compilation mode of U-SQL. Must be one of these values : Semantic, Full + and SingleBox. Type: string (or Expression with resultType string). + :vartype compilation_mode: any """ _validation = { @@ -13485,6 +18039,43 @@ def __init__( compilation_mode: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. 
+ :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword script_path: Required. Case-sensitive path to folder that contains the U-SQL script. + Type: string (or Expression with resultType string). + :paramtype script_path: any + :keyword script_linked_service: Required. Script linked service reference. + :paramtype script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. + Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. + :paramtype degree_of_parallelism: any + :keyword priority: Determines which jobs out of all that are queued should be selected to run + first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or + Expression with resultType integer), minimum: 1. + :paramtype priority: any + :keyword parameters: Parameters for U-SQL job request. + :paramtype parameters: dict[str, any] + :keyword runtime_version: Runtime version of the U-SQL engine to use. Type: string (or + Expression with resultType string). + :paramtype runtime_version: any + :keyword compilation_mode: Compilation mode of U-SQL. Must be one of these values : Semantic, + Full and SingleBox. Type: string (or Expression with resultType string). 
+ :paramtype compilation_mode: any + """ super(DataLakeAnalyticsUSQLActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'DataLakeAnalyticsU-SQL' # type: str self.script_path = script_path @@ -13501,15 +18092,15 @@ class DatasetCompression(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset compression. Type: string (or Expression with resultType string). - :type type: any - :param level: The dataset compression level. Type: string (or Expression with resultType + :vartype type: any + :ivar level: The dataset compression level. Type: string (or Expression with resultType string). - :type level: any + :vartype level: any """ _validation = { @@ -13530,6 +18121,17 @@ def __init__( level: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword type: Required. Type of dataset compression. Type: string (or Expression with + resultType string). + :paramtype type: any + :keyword level: The dataset compression level. Type: string (or Expression with resultType + string). 
+ :paramtype level: any + """ super(DatasetCompression, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = type @@ -13539,10 +18141,10 @@ def __init__( class DatasetDataElement(msrest.serialization.Model): """Columns that define the structure of the dataset. - :param name: Name of the column. Type: string (or Expression with resultType string). - :type name: any - :param type: Type of the column. Type: string (or Expression with resultType string). - :type type: any + :ivar name: Name of the column. Type: string (or Expression with resultType string). + :vartype name: any + :ivar type: Type of the column. Type: string (or Expression with resultType string). + :vartype type: any """ _attribute_map = { @@ -13557,6 +18159,12 @@ def __init__( type: Optional[Any] = None, **kwargs ): + """ + :keyword name: Name of the column. Type: string (or Expression with resultType string). + :paramtype name: any + :keyword type: Type of the column. Type: string (or Expression with resultType string). + :paramtype type: any + """ super(DatasetDataElement, self).__init__(**kwargs) self.name = name self.type = type @@ -13567,10 +18175,10 @@ class DatasetDebugResource(SubResourceDebugResource): All required parameters must be populated in order to send to Azure. - :param name: The resource name. - :type name: str - :param properties: Required. Dataset properties. - :type properties: ~azure.mgmt.datafactory.models.Dataset + :ivar name: The resource name. + :vartype name: str + :ivar properties: Required. Dataset properties. + :vartype properties: ~azure.mgmt.datafactory.models.Dataset """ _validation = { @@ -13589,6 +18197,12 @@ def __init__( name: Optional[str] = None, **kwargs ): + """ + :keyword name: The resource name. + :paramtype name: str + :keyword properties: Required. Dataset properties. 
+ :paramtype properties: ~azure.mgmt.datafactory.models.Dataset + """ super(DatasetDebugResource, self).__init__(name=name, **kwargs) self.properties = properties @@ -13596,8 +18210,8 @@ def __init__( class DatasetFolder(msrest.serialization.Model): """The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :param name: The name of the folder that this Dataset is in. - :type name: str + :ivar name: The name of the folder that this Dataset is in. + :vartype name: str """ _attribute_map = { @@ -13610,6 +18224,10 @@ def __init__( name: Optional[str] = None, **kwargs ): + """ + :keyword name: The name of the folder that this Dataset is in. + :paramtype name: str + """ super(DatasetFolder, self).__init__(**kwargs) self.name = name @@ -13619,10 +18237,10 @@ class DatasetListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of datasets. - :type value: list[~azure.mgmt.datafactory.models.DatasetResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of datasets. + :vartype value: list[~azure.mgmt.datafactory.models.DatasetResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -13641,6 +18259,12 @@ def __init__( next_link: Optional[str] = None, **kwargs ): + """ + :keyword value: Required. List of datasets. + :paramtype value: list[~azure.mgmt.datafactory.models.DatasetResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(DatasetListResponse, self).__init__(**kwargs) self.value = value self.next_link = next_link @@ -13655,10 +18279,10 @@ class DatasetReference(msrest.serialization.Model): :ivar type: Dataset reference type. Has constant value: "DatasetReference". 
:vartype type: str - :param reference_name: Required. Reference dataset name. - :type reference_name: str - :param parameters: Arguments for dataset. - :type parameters: dict[str, any] + :ivar reference_name: Required. Reference dataset name. + :vartype reference_name: str + :ivar parameters: Arguments for dataset. + :vartype parameters: dict[str, any] """ _validation = { @@ -13681,6 +18305,12 @@ def __init__( parameters: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword reference_name: Required. Reference dataset name. + :paramtype reference_name: str + :keyword parameters: Arguments for dataset. + :paramtype parameters: dict[str, any] + """ super(DatasetReference, self).__init__(**kwargs) self.reference_name = reference_name self.parameters = parameters @@ -13701,8 +18331,8 @@ class DatasetResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Dataset properties. - :type properties: ~azure.mgmt.datafactory.models.Dataset + :ivar properties: Required. Dataset properties. + :vartype properties: ~azure.mgmt.datafactory.models.Dataset """ _validation = { @@ -13727,6 +18357,10 @@ def __init__( properties: "Dataset", **kwargs ): + """ + :keyword properties: Required. Dataset properties. + :paramtype properties: ~azure.mgmt.datafactory.models.Dataset + """ super(DatasetResource, self).__init__(**kwargs) self.properties = properties @@ -13734,13 +18368,13 @@ def __init__( class DatasetSchemaDataElement(msrest.serialization.Model): """Columns that define the physical type schema of the dataset. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Name of the schema column. Type: string (or Expression with resultType string). 
- :type name: any - :param type: Type of the schema column. Type: string (or Expression with resultType string). - :type type: any + :vartype additional_properties: dict[str, any] + :ivar name: Name of the schema column. Type: string (or Expression with resultType string). + :vartype name: any + :ivar type: Type of the schema column. Type: string (or Expression with resultType string). + :vartype type: any """ _attribute_map = { @@ -13757,6 +18391,15 @@ def __init__( type: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Name of the schema column. Type: string (or Expression with resultType string). + :paramtype name: any + :keyword type: Type of the schema column. Type: string (or Expression with resultType string). + :paramtype type: any + """ super(DatasetSchemaDataElement, self).__init__(**kwargs) self.additional_properties = additional_properties self.name = name @@ -13768,48 +18411,48 @@ class Db2LinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. 
- :type annotations: list[any] - :param connection_string: The connection string. It is mutually exclusive with server, - database, authenticationType, userName, packageCollection and certificateCommonName property. - Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param server: Server name for connection. It is mutually exclusive with connectionString + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: The connection string. It is mutually exclusive with server, database, + authenticationType, userName, packageCollection and certificateCommonName property. Type: + string, SecureString or AzureKeyVaultSecretReference. + :vartype connection_string: any + :ivar server: Server name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type server: any - :param database: Database name for connection. It is mutually exclusive with connectionString + :vartype server: any + :ivar database: Database name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type database: any - :param authentication_type: AuthenticationType to be used for connection. It is mutually + :vartype database: any + :ivar authentication_type: AuthenticationType to be used for connection. 
It is mutually exclusive with connectionString property. Possible values include: "Basic". - :type authentication_type: str or ~azure.mgmt.datafactory.models.Db2AuthenticationType - :param username: Username for authentication. It is mutually exclusive with connectionString + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.Db2AuthenticationType + :ivar username: Username for authentication. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type username: any - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param package_collection: Under where packages are created when querying database. It is + :vartype username: any + :ivar password: Password for authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar package_collection: Under where packages are created when querying database. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type package_collection: any - :param certificate_common_name: Certificate Common Name when TLS is enabled. It is mutually + :vartype package_collection: any + :ivar certificate_common_name: Certificate Common Name when TLS is enabled. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type certificate_common_name: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype certificate_common_name: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -13853,6 +18496,48 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: The connection string. It is mutually exclusive with server, + database, authenticationType, userName, packageCollection and certificateCommonName property. + Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword server: Server name for connection. It is mutually exclusive with connectionString + property. Type: string (or Expression with resultType string). + :paramtype server: any + :keyword database: Database name for connection. It is mutually exclusive with connectionString + property. Type: string (or Expression with resultType string). + :paramtype database: any + :keyword authentication_type: AuthenticationType to be used for connection. It is mutually + exclusive with connectionString property. Possible values include: "Basic". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.Db2AuthenticationType + :keyword username: Username for authentication. It is mutually exclusive with connectionString + property. Type: string (or Expression with resultType string). 
+ :paramtype username: any + :keyword password: Password for authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword package_collection: Under where packages are created when querying database. It is + mutually exclusive with connectionString property. Type: string (or Expression with resultType + string). + :paramtype package_collection: any + :keyword certificate_common_name: Certificate Common Name when TLS is enabled. It is mutually + exclusive with connectionString property. Type: string (or Expression with resultType string). + :paramtype certificate_common_name: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. It is mutually exclusive with + connectionString property. Type: string (or Expression with resultType string). + :paramtype encrypted_credential: any + """ super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Db2' # type: str self.connection_string = connection_string @@ -13871,31 +18556,31 @@ class Db2Source(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). 
- :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any + :vartype additional_columns: any + :ivar query: Database query. Type: string (or Expression with resultType string). 
+ :vartype query: any """ _validation = { @@ -13927,6 +18612,31 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Type: string (or Expression with resultType string). 
+ :paramtype query: any + """ super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'Db2Source' # type: str self.query = query @@ -13937,36 +18647,36 @@ class Db2TableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with + :vartype table_name: any + :ivar schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The Db2 table name. Type: string (or Expression with resultType string). - :type table: any + :vartype schema_type_properties_schema: any + :ivar table: The Db2 table name. Type: string (or Expression with resultType string). + :vartype table: any """ _validation = { @@ -14005,6 +18715,36 @@ def __init__( table: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with + resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The Db2 table name. Type: string (or Expression with resultType string). + :paramtype table: any + """ super(Db2TableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'Db2Table' # type: str self.table_name = table_name @@ -14017,39 +18757,39 @@ class DeleteActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param recursive: If true, files or sub-folders under current folder path will be deleted + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar recursive: If true, files or sub-folders under current folder path will be deleted recursively. Default is false. 
Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param max_concurrent_connections: The max concurrent connections to connect data source at the + :vartype recursive: any + :ivar max_concurrent_connections: The max concurrent connections to connect data source at the same time. - :type max_concurrent_connections: int - :param enable_logging: Whether to record detailed logs of delete-activity execution. Default + :vartype max_concurrent_connections: int + :ivar enable_logging: Whether to record detailed logs of delete-activity execution. Default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_logging: any - :param log_storage_settings: Log storage settings customer need to provide when enableLogging - is true. - :type log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings - :param dataset: Required. Delete activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param store_settings: Delete activity store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :vartype enable_logging: any + :ivar log_storage_settings: Log storage settings customer need to provide when enableLogging is + true. + :vartype log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings + :ivar dataset: Required. Delete activity dataset reference. + :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :ivar store_settings: Delete activity store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings """ _validation = { @@ -14094,6 +18834,39 @@ def __init__( store_settings: Optional["StoreReadSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. 
+ :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword recursive: If true, files or sub-folders under current folder path will be deleted + recursively. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword max_concurrent_connections: The max concurrent connections to connect data source at + the same time. + :paramtype max_concurrent_connections: int + :keyword enable_logging: Whether to record detailed logs of delete-activity execution. Default + value is false. Type: boolean (or Expression with resultType boolean). + :paramtype enable_logging: any + :keyword log_storage_settings: Log storage settings customer need to provide when enableLogging + is true. + :paramtype log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings + :keyword dataset: Required. Delete activity dataset reference. + :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :keyword store_settings: Delete activity store settings. 
+ :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'Delete' # type: str self.recursive = recursive @@ -14107,8 +18880,8 @@ def __init__( class DeleteDataFlowDebugSessionRequest(msrest.serialization.Model): """Request body structure for deleting data flow debug session. - :param session_id: The ID of data flow debug session. - :type session_id: str + :ivar session_id: The ID of data flow debug session. + :vartype session_id: str """ _attribute_map = { @@ -14121,6 +18894,10 @@ def __init__( session_id: Optional[str] = None, **kwargs ): + """ + :keyword session_id: The ID of data flow debug session. + :paramtype session_id: str + """ super(DeleteDataFlowDebugSessionRequest, self).__init__(**kwargs) self.session_id = session_id @@ -14130,56 +18907,56 @@ class DelimitedTextDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the delimited text storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param column_delimiter: The column delimiter. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar location: The location of the delimited text storage. + :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation + :ivar column_delimiter: The column delimiter. 
Type: string (or Expression with resultType string). - :type column_delimiter: any - :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string). - :type row_delimiter: any - :param encoding_name: The code page name of the preferred encoding. If miss, the default value + :vartype column_delimiter: any + :ivar row_delimiter: The row delimiter. Type: string (or Expression with resultType string). + :vartype row_delimiter: any + :ivar encoding_name: The code page name of the preferred encoding. If miss, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :type encoding_name: any - :param compression_codec: The data compressionCodec. Type: string (or Expression with - resultType string). - :type compression_codec: any - :param compression_level: The data compression method used for DelimitedText. - :type compression_level: any - :param quote_char: The quote character. Type: string (or Expression with resultType string). - :type quote_char: any - :param escape_char: The escape character. Type: string (or Expression with resultType string). - :type escape_char: any - :param first_row_as_header: When used as input, treat the first row of data as headers. When + :vartype encoding_name: any + :ivar compression_codec: The data compressionCodec. Type: string (or Expression with resultType + string). + :vartype compression_codec: any + :ivar compression_level: The data compression method used for DelimitedText. + :vartype compression_level: any + :ivar quote_char: The quote character. Type: string (or Expression with resultType string). + :vartype quote_char: any + :ivar escape_char: The escape character. Type: string (or Expression with resultType string). 
+ :vartype escape_char: any + :ivar first_row_as_header: When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). - :type first_row_as_header: any - :param null_value: The null value string. Type: string (or Expression with resultType string). - :type null_value: any + :vartype first_row_as_header: any + :ivar null_value: The null value string. Type: string (or Expression with resultType string). + :vartype null_value: any """ _validation = { @@ -14232,6 +19009,58 @@ def __init__( null_value: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword location: The location of the delimited text storage. 
+ :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation + :keyword column_delimiter: The column delimiter. Type: string (or Expression with resultType + string). + :paramtype column_delimiter: any + :keyword row_delimiter: The row delimiter. Type: string (or Expression with resultType string). + :paramtype row_delimiter: any + :keyword encoding_name: The code page name of the preferred encoding. If miss, the default + value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the + table in the following link to set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). + :paramtype encoding_name: any + :keyword compression_codec: The data compressionCodec. Type: string (or Expression with + resultType string). + :paramtype compression_codec: any + :keyword compression_level: The data compression method used for DelimitedText. + :paramtype compression_level: any + :keyword quote_char: The quote character. Type: string (or Expression with resultType string). + :paramtype quote_char: any + :keyword escape_char: The escape character. Type: string (or Expression with resultType + string). + :paramtype escape_char: any + :keyword first_row_as_header: When used as input, treat the first row of data as headers. When + used as output,write the headers into the output as the first row of data. The default value is + false. Type: boolean (or Expression with resultType boolean). + :paramtype first_row_as_header: any + :keyword null_value: The null value string. Type: string (or Expression with resultType + string). 
+ :paramtype null_value: any + """ super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'DelimitedText' # type: str self.location = location @@ -14251,16 +19080,16 @@ class DelimitedTextReadSettings(FormatReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param skip_line_count: Indicates the number of non-empty rows to skip when reading data from + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar skip_line_count: Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with resultType integer). - :type skip_line_count: any - :param compression_properties: Compression settings. - :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + :vartype skip_line_count: any + :ivar compression_properties: Compression settings. + :vartype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ _validation = { @@ -14282,6 +19111,16 @@ def __init__( compression_properties: Optional["CompressionReadSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword skip_line_count: Indicates the number of non-empty rows to skip when reading data from + input files. Type: integer (or Expression with resultType integer). + :paramtype skip_line_count: any + :keyword compression_properties: Compression settings. + :paramtype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + """ super(DelimitedTextReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'DelimitedTextReadSettings' # type: str self.skip_line_count = skip_line_count @@ -14293,33 +19132,33 @@ class DelimitedTextSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). 
- :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: DelimitedText format settings. - :type format_settings: ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings + :vartype disable_metrics_collection: any + :ivar store_settings: DelimitedText store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :ivar format_settings: DelimitedText format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings """ _validation = { @@ -14353,6 +19192,33 @@ def __init__( format_settings: Optional["DelimitedTextWriteSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. 
Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: DelimitedText store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :keyword format_settings: DelimitedText format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings + """ super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DelimitedTextSink' # type: str self.store_settings = store_settings @@ -14364,30 +19230,30 @@ class DelimitedTextSource(CopySource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: DelimitedText format settings. 
- :type format_settings: ~azure.mgmt.datafactory.models.DelimitedTextReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar store_settings: DelimitedText store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar format_settings: DelimitedText format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.DelimitedTextReadSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -14419,6 +19285,30 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: DelimitedText store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword format_settings: DelimitedText format settings. 
+ :paramtype format_settings: ~azure.mgmt.datafactory.models.DelimitedTextReadSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DelimitedTextSource' # type: str self.store_settings = store_settings @@ -14431,24 +19321,24 @@ class DelimitedTextWriteSettings(FormatWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param quote_all_text: Indicates whether string values should always be enclosed with quotes. + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar quote_all_text: Indicates whether string values should always be enclosed with quotes. Type: boolean (or Expression with resultType boolean). - :type quote_all_text: any - :param file_extension: Required. The file extension used to create the files. Type: string (or + :vartype quote_all_text: any + :ivar file_extension: Required. The file extension used to create the files. Type: string (or Expression with resultType string). 
- :type file_extension: any - :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + :vartype file_extension: any + :ivar max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :type max_rows_per_file: any - :param file_name_prefix: Specifies the file name pattern + :vartype max_rows_per_file: any + :ivar file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). - :type file_name_prefix: any + :vartype file_name_prefix: any """ _validation = { @@ -14475,6 +19365,24 @@ def __init__( file_name_prefix: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword quote_all_text: Indicates whether string values should always be enclosed with quotes. + Type: boolean (or Expression with resultType boolean). + :paramtype quote_all_text: any + :keyword file_extension: Required. The file extension used to create the files. Type: string + (or Expression with resultType string). + :paramtype file_extension: any + :keyword max_rows_per_file: Limit the written file's row count to be smaller than or equal to + the specified count. Type: integer (or Expression with resultType integer). + :paramtype max_rows_per_file: any + :keyword file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). 
+ :paramtype file_name_prefix: any + """ super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'DelimitedTextWriteSettings' # type: str self.quote_all_text = quote_all_text @@ -14491,8 +19399,8 @@ class DependencyReference(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. The type of dependency reference.Constant filled by server. - :type type: str + :ivar type: Required. The type of dependency reference.Constant filled by server. + :vartype type: str """ _validation = { @@ -14511,6 +19419,8 @@ def __init__( self, **kwargs ): + """ + """ super(DependencyReference, self).__init__(**kwargs) self.type = None # type: Optional[str] @@ -14520,16 +19430,16 @@ class DistcpSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param resource_manager_endpoint: Required. Specifies the Yarn ResourceManager endpoint. Type: + :ivar resource_manager_endpoint: Required. Specifies the Yarn ResourceManager endpoint. Type: string (or Expression with resultType string). - :type resource_manager_endpoint: any - :param temp_script_path: Required. Specifies an existing folder path which will be used to - store temp Distcp command script. The script file is generated by ADF and will be removed after - Copy job finished. Type: string (or Expression with resultType string). - :type temp_script_path: any - :param distcp_options: Specifies the Distcp options. Type: string (or Expression with - resultType string). - :type distcp_options: any + :vartype resource_manager_endpoint: any + :ivar temp_script_path: Required. Specifies an existing folder path which will be used to store + temp Distcp command script. The script file is generated by ADF and will be removed after Copy + job finished. Type: string (or Expression with resultType string). 
+ :vartype temp_script_path: any + :ivar distcp_options: Specifies the Distcp options. Type: string (or Expression with resultType + string). + :vartype distcp_options: any """ _validation = { @@ -14551,6 +19461,18 @@ def __init__( distcp_options: Optional[Any] = None, **kwargs ): + """ + :keyword resource_manager_endpoint: Required. Specifies the Yarn ResourceManager endpoint. + Type: string (or Expression with resultType string). + :paramtype resource_manager_endpoint: any + :keyword temp_script_path: Required. Specifies an existing folder path which will be used to + store temp Distcp command script. The script file is generated by ADF and will be removed after + Copy job finished. Type: string (or Expression with resultType string). + :paramtype temp_script_path: any + :keyword distcp_options: Specifies the Distcp options. Type: string (or Expression with + resultType string). + :paramtype distcp_options: any + """ super(DistcpSettings, self).__init__(**kwargs) self.resource_manager_endpoint = resource_manager_endpoint self.temp_script_path = temp_script_path @@ -14562,31 +19484,31 @@ class DocumentDbCollectionDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param collection_name: Required. Document Database collection name. Type: string (or - Expression with resultType string). - :type collection_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar collection_name: Required. Document Database collection name. Type: string (or Expression + with resultType string). 
+ :vartype collection_name: any """ _validation = { @@ -14622,6 +19544,31 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword collection_name: Required. Document Database collection name. Type: string (or + Expression with resultType string). 
+ :paramtype collection_name: any + """ super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'DocumentDbCollection' # type: str self.collection_name = collection_name @@ -14632,35 +19579,35 @@ class DocumentDbCollectionSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or + :vartype disable_metrics_collection: any + :ivar nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). - :type nesting_separator: any - :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or + :vartype nesting_separator: any + :ivar write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. - :type write_behavior: any + :vartype write_behavior: any """ _validation = { @@ -14694,6 +19641,35 @@ def __init__( write_behavior: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword nesting_separator: Nested properties separator. Default is . (dot). Type: string (or + Expression with resultType string). + :paramtype nesting_separator: any + :keyword write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or + Expression with resultType string). Allowed values: insert and upsert. + :paramtype write_behavior: any + """ super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DocumentDbCollectionSink' # type: str self.nesting_separator = nesting_separator @@ -14705,34 +19681,34 @@ class DocumentDbCollectionSource(CopySource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: Documents query. Type: string (or Expression with resultType string). - :type query: any - :param nesting_separator: Nested properties separator. Type: string (or Expression with + :vartype disable_metrics_collection: any + :ivar query: Documents query. 
Type: string (or Expression with resultType string). + :vartype query: any + :ivar nesting_separator: Nested properties separator. Type: string (or Expression with resultType string). - :type nesting_separator: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype nesting_separator: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -14766,6 +19742,34 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: Documents query. 
Type: string (or Expression with resultType string). + :paramtype query: any + :keyword nesting_separator: Nested properties separator. Type: string (or Expression with + resultType string). + :paramtype nesting_separator: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DocumentDbCollectionSource' # type: str self.query = query @@ -14779,28 +19783,28 @@ class DrillLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. 
- :type annotations: list[any] - :param connection_string: An ODBC connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar pwd: The Azure key vault secret reference of password in connection string. + :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -14832,6 +19836,28 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword pwd: The Azure key vault secret reference of password in connection string. + :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Drill' # type: str self.connection_string = connection_string @@ -14844,32 +19870,32 @@ class DrillSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. 
+ :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. 
Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -14901,6 +19927,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). 
+ :paramtype query: any + """ super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'DrillSource' # type: str self.query = query @@ -14911,36 +19963,36 @@ class DrillTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The table name of the Drill. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The schema name of the Drill. Type: string (or Expression + :vartype table_name: any + :ivar table: The table name of the Drill. Type: string (or Expression with resultType string). + :vartype table: any + :ivar schema_type_properties_schema: The schema name of the Drill. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -14979,6 +20031,37 @@ def __init__( schema_type_properties_schema: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of the Drill. Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of the Drill. Type: string (or + Expression with resultType string). + :paramtype schema_type_properties_schema: any + """ super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'DrillTable' # type: str self.table_name = table_name @@ -14989,11 +20072,11 @@ def __init__( class DWCopyCommandDefaultValue(msrest.serialization.Model): """Default value. 
- :param column_name: Column name. Type: object (or Expression with resultType string). - :type column_name: any - :param default_value: The default value of the column. Type: object (or Expression with + :ivar column_name: Column name. Type: object (or Expression with resultType string). + :vartype column_name: any + :ivar default_value: The default value of the column. Type: object (or Expression with resultType string). - :type default_value: any + :vartype default_value: any """ _attribute_map = { @@ -15008,6 +20091,13 @@ def __init__( default_value: Optional[Any] = None, **kwargs ): + """ + :keyword column_name: Column name. Type: object (or Expression with resultType string). + :paramtype column_name: any + :keyword default_value: The default value of the column. Type: object (or Expression with + resultType string). + :paramtype default_value: any + """ super(DWCopyCommandDefaultValue, self).__init__(**kwargs) self.column_name = column_name self.default_value = default_value @@ -15016,15 +20106,15 @@ def __init__( class DWCopyCommandSettings(msrest.serialization.Model): """DW Copy Command settings. - :param default_values: Specifies the default values for each target column in SQL DW. The + :ivar default_values: Specifies the default values for each target column in SQL DW. The default values in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default value. Type: array of objects (or Expression with resultType array of objects). - :type default_values: list[~azure.mgmt.datafactory.models.DWCopyCommandDefaultValue] - :param additional_options: Additional options directly passed to SQL DW in Copy Command. Type: + :vartype default_values: list[~azure.mgmt.datafactory.models.DWCopyCommandDefaultValue] + :ivar additional_options: Additional options directly passed to SQL DW in Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). 
Example: "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. - :type additional_options: dict[str, str] + :vartype additional_options: dict[str, str] """ _attribute_map = { @@ -15039,6 +20129,17 @@ def __init__( additional_options: Optional[Dict[str, str]] = None, **kwargs ): + """ + :keyword default_values: Specifies the default values for each target column in SQL DW. The + default values in the property overwrite the DEFAULT constraint set in the DB, and identity + column cannot have a default value. Type: array of objects (or Expression with resultType array + of objects). + :paramtype default_values: list[~azure.mgmt.datafactory.models.DWCopyCommandDefaultValue] + :keyword additional_options: Additional options directly passed to SQL DW in Copy Command. + Type: key value pairs (value should be string type) (or Expression with resultType object). + Example: "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. + :paramtype additional_options: dict[str, str] + """ super(DWCopyCommandSettings, self).__init__(**kwargs) self.default_values = default_values self.additional_options = additional_options @@ -15049,40 +20150,40 @@ class DynamicsAXLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData endpoint. - :type url: any - :param service_principal_id: Required. Specify the application's client ID. Type: string (or + :vartype url: any + :ivar service_principal_id: Required. Specify the application's client ID. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: Required. Specify the application's key. Mark this field as a + :vartype service_principal_id: any + :ivar service_principal_key: Required. Specify the application's key. Mark this field as a SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key Vault. Type: string (or Expression with resultType string). - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. Specify the tenant information (domain name or tenant ID) under which + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: Required. 
Specify the tenant information (domain name or tenant ID) under which your application resides. Retrieve it by hovering the mouse in the top-right corner of the Azure portal. Type: string (or Expression with resultType string). - :type tenant: any - :param aad_resource_id: Required. Specify the resource you are requesting authorization. Type: + :vartype tenant: any + :ivar aad_resource_id: Required. Specify the resource you are requesting authorization. Type: string (or Expression with resultType string). - :type aad_resource_id: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype aad_resource_id: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -15125,6 +20226,40 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData + endpoint. + :paramtype url: any + :keyword service_principal_id: Required. Specify the application's client ID. Type: string (or + Expression with resultType string). 
+ :paramtype service_principal_id: any + :keyword service_principal_key: Required. Specify the application's key. Mark this field as a + SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key + Vault. Type: string (or Expression with resultType string). + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: Required. Specify the tenant information (domain name or tenant ID) under + which your application resides. Retrieve it by hovering the mouse in the top-right corner of + the Azure portal. Type: string (or Expression with resultType string). + :paramtype tenant: any + :keyword aad_resource_id: Required. Specify the resource you are requesting authorization. + Type: string (or Expression with resultType string). + :paramtype aad_resource_id: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'DynamicsAX' # type: str self.url = url @@ -15140,31 +20275,31 @@ class DynamicsAXResourceDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar path: Required. 
The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). - :type path: any + :vartype path: any """ _validation = { @@ -15200,6 +20335,31 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression + with resultType string). 
+ :paramtype path: any + """ super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'DynamicsAXResource' # type: str self.path = path @@ -15210,37 +20370,37 @@ class DynamicsAXSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + :vartype query: any + :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: any + :vartype http_request_timeout: any """ _validation = { @@ -15274,6 +20434,37 @@ def __init__( http_request_timeout: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. 
Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the + timeout to get a response, not the timeout to read response data. Default value: 00:05:00. + Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype http_request_timeout: any + """ super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'DynamicsAXSource' # type: str self.query = query @@ -15285,31 +20476,31 @@ class DynamicsCrmEntityDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param entity_name: The logical name of the entity. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar entity_name: The logical name of the entity. Type: string (or Expression with resultType string). - :type entity_name: any + :vartype entity_name: any """ _validation = { @@ -15344,6 +20535,31 @@ def __init__( entity_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword entity_name: The logical name of the entity. Type: string (or Expression with + resultType string). + :paramtype entity_name: any + """ super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'DynamicsCrmEntity' # type: str self.entity_name = entity_name @@ -15354,65 +20570,64 @@ class DynamicsCrmLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param deployment_type: Required. 
The deployment type of the Dynamics CRM instance. 'Online' - for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: - string (or Expression with resultType string). - :type deployment_type: any - :param host_name: The host name of the on-premises Dynamics CRM server. The property is - required for on-prem and not allowed for online. Type: string (or Expression with resultType - string). - :type host_name: any - :param port: The port of on-premises Dynamics CRM server. The property is required for on-prem + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' for + Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: string + (or Expression with resultType string). + :vartype deployment_type: any + :ivar host_name: The host name of the on-premises Dynamics CRM server. The property is required + for on-prem and not allowed for online. Type: string (or Expression with resultType string). + :vartype host_name: any + :ivar port: The port of on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: any - :param service_uri: The URL to the Microsoft Dynamics CRM server. 
The property is required for + :vartype port: any + :ivar service_uri: The URL to the Microsoft Dynamics CRM server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: any - :param organization_name: The organization name of the Dynamics CRM instance. The property is + :vartype service_uri: any + :ivar organization_name: The organization name of the Dynamics CRM instance. The property is required for on-prem and required for online when there are more than one Dynamics CRM instances associated with the user. Type: string (or Expression with resultType string). - :type organization_name: any - :param authentication_type: Required. The authentication type to connect to Dynamics CRM - server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, - 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). - :type authentication_type: any - :param username: User name to access the Dynamics CRM instance. Type: string (or Expression - with resultType string). - :type username: any - :param password: Password to access the Dynamics CRM instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_credential_type: The service principal credential type to use in + :vartype organization_name: any + :ivar authentication_type: Required. The authentication type to connect to Dynamics CRM server. + 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' + for Server-To-Server authentication in online scenario. Type: string (or Expression with + resultType string). 
+ :vartype authentication_type: any + :ivar username: User name to access the Dynamics CRM instance. Type: string (or Expression with + resultType string). + :vartype username: any + :ivar password: Password to access the Dynamics CRM instance. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_principal_id: The client ID of the application in Azure Active Directory used for + Server-To-Server authentication. Type: string (or Expression with resultType string). + :vartype service_principal_id: any + :ivar service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). - :type service_principal_credential_type: any - :param service_principal_credential: The credential of the service principal object in Azure + :vartype service_principal_credential_type: any + :ivar service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -15464,6 +20679,65 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' + for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: + string (or Expression with resultType string). + :paramtype deployment_type: any + :keyword host_name: The host name of the on-premises Dynamics CRM server. The property is + required for on-prem and not allowed for online. Type: string (or Expression with resultType + string). + :paramtype host_name: any + :keyword port: The port of on-premises Dynamics CRM server. The property is required for + on-prem and not allowed for online. Default is 443. Type: integer (or Expression with + resultType integer), minimum: 0. + :paramtype port: any + :keyword service_uri: The URL to the Microsoft Dynamics CRM server. The property is required + for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). + :paramtype service_uri: any + :keyword organization_name: The organization name of the Dynamics CRM instance. 
The property is + required for on-prem and required for online when there are more than one Dynamics CRM + instances associated with the user. Type: string (or Expression with resultType string). + :paramtype organization_name: any + :keyword authentication_type: Required. The authentication type to connect to Dynamics CRM + server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, + 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or + Expression with resultType string). + :paramtype authentication_type: any + :keyword username: User name to access the Dynamics CRM instance. Type: string (or Expression + with resultType string). + :paramtype username: any + :keyword password: Password to access the Dynamics CRM instance. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). + :paramtype service_principal_credential_type: any + :keyword service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :paramtype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. 
Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'DynamicsCrm' # type: str self.deployment_type = deployment_type @@ -15485,39 +20759,39 @@ class DynamicsCrmSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: Required. The write behavior for the operation. Possible values include: + :vartype disable_metrics_collection: any + :ivar write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". - :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior - :param ignore_null_values: The flag indicating whether to ignore null values from input dataset + :vartype write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior + :ivar ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: any - :param alternate_key_name: The logical name of the alternate key which will be used when + :vartype ignore_null_values: any + :ivar alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). 
- :type alternate_key_name: any + :vartype alternate_key_name: any """ _validation = { @@ -15554,6 +20828,39 @@ def __init__( alternate_key_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: Required. The write behavior for the operation. Possible values + include: "Upsert". + :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior + :keyword ignore_null_values: The flag indicating whether to ignore null values from input + dataset (except key fields) during write operation. Default is false. Type: boolean (or + Expression with resultType boolean). 
+ :paramtype ignore_null_values: any + :keyword alternate_key_name: The logical name of the alternate key which will be used when + upserting records. Type: string (or Expression with resultType string). + :paramtype alternate_key_name: any + """ super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsCrmSink' # type: str self.write_behavior = write_behavior @@ -15566,29 +20873,29 @@ class DynamicsCrmSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM + :vartype disable_metrics_collection: any + :ivar query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). - :type query: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -15618,6 +20925,29 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM + (online & on-premises). Type: string (or Expression with resultType string). + :paramtype query: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsCrmSource' # type: str self.query = query @@ -15629,31 +20959,31 @@ class DynamicsEntityDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. 
+ :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param entity_name: The logical name of the entity. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar entity_name: The logical name of the entity. Type: string (or Expression with resultType string). 
- :type entity_name: any + :vartype entity_name: any """ _validation = { @@ -15688,6 +21018,31 @@ def __init__( entity_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword entity_name: The logical name of the entity. Type: string (or Expression with + resultType string). + :paramtype entity_name: any + """ super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'DynamicsEntity' # type: str self.entity_name = entity_name @@ -15698,64 +21053,64 @@ class DynamicsLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression with resultType string). - :type deployment_type: any - :param host_name: The host name of the on-premises Dynamics server. The property is required - for on-prem and not allowed for online. Type: string (or Expression with resultType string). 
- :type host_name: any - :param port: The port of on-premises Dynamics server. The property is required for on-prem and + :vartype deployment_type: any + :ivar host_name: The host name of the on-premises Dynamics server. The property is required for + on-prem and not allowed for online. Type: string (or Expression with resultType string). + :vartype host_name: any + :ivar port: The port of on-premises Dynamics server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: any - :param service_uri: The URL to the Microsoft Dynamics server. The property is required for + :vartype port: any + :ivar service_uri: The URL to the Microsoft Dynamics server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: any - :param organization_name: The organization name of the Dynamics instance. The property is + :vartype service_uri: any + :ivar organization_name: The organization name of the Dynamics instance. The property is required for on-prem and required for online when there are more than one Dynamics instances associated with the user. Type: string (or Expression with resultType string). - :type organization_name: any - :param authentication_type: Required. The authentication type to connect to Dynamics server. + :vartype organization_name: any + :ivar authentication_type: Required. The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). - :type authentication_type: any - :param username: User name to access the Dynamics instance. Type: string (or Expression with + :vartype authentication_type: any + :ivar username: User name to access the Dynamics instance. 
Type: string (or Expression with resultType string). - :type username: any - :param password: Password to access the Dynamics instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_credential_type: The service principal credential type to use in + :vartype username: any + :ivar password: Password to access the Dynamics instance. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_principal_id: The client ID of the application in Azure Active Directory used for + Server-To-Server authentication. Type: string (or Expression with resultType string). + :vartype service_principal_id: any + :ivar service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). - :type service_principal_credential_type: str - :param service_principal_credential: The credential of the service principal object in Azure + :vartype service_principal_credential_type: str + :ivar service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are + :vartype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -15807,6 +21162,64 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for + Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or + Expression with resultType string). + :paramtype deployment_type: any + :keyword host_name: The host name of the on-premises Dynamics server. The property is required + for on-prem and not allowed for online. Type: string (or Expression with resultType string). + :paramtype host_name: any + :keyword port: The port of on-premises Dynamics server. The property is required for on-prem + and not allowed for online. Default is 443. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype port: any + :keyword service_uri: The URL to the Microsoft Dynamics server. 
The property is required for + on-line and not allowed for on-prem. Type: string (or Expression with resultType string). + :paramtype service_uri: any + :keyword organization_name: The organization name of the Dynamics instance. The property is + required for on-prem and required for online when there are more than one Dynamics instances + associated with the user. Type: string (or Expression with resultType string). + :paramtype organization_name: any + :keyword authentication_type: Required. The authentication type to connect to Dynamics server. + 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' + for Server-To-Server authentication in online scenario. Type: string (or Expression with + resultType string). + :paramtype authentication_type: any + :keyword username: User name to access the Dynamics instance. Type: string (or Expression with + resultType string). + :paramtype username: any + :keyword password: Password to access the Dynamics instance. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). + :paramtype service_principal_credential_type: str + :keyword service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. 
If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :paramtype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Dynamics' # type: str self.deployment_type = deployment_type @@ -15828,39 +21241,39 @@ class DynamicsSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: Required. The write behavior for the operation. Possible values include: + :vartype disable_metrics_collection: any + :ivar write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". - :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior - :param ignore_null_values: The flag indicating whether ignore null values from input dataset + :vartype write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior + :ivar ignore_null_values: The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type ignore_null_values: any - :param alternate_key_name: The logical name of the alternate key which will be used when + :vartype ignore_null_values: any + :ivar alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). - :type alternate_key_name: any + :vartype alternate_key_name: any """ _validation = { @@ -15897,6 +21310,39 @@ def __init__( alternate_key_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: Required. The write behavior for the operation. Possible values + include: "Upsert". 
+ :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior + :keyword ignore_null_values: The flag indicating whether ignore null values from input dataset + (except key fields) during write operation. Default is false. Type: boolean (or Expression with + resultType boolean). + :paramtype ignore_null_values: any + :keyword alternate_key_name: The logical name of the alternate key which will be used when + upserting records. Type: string (or Expression with resultType string). + :paramtype alternate_key_name: any + """ super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsSink' # type: str self.write_behavior = write_behavior @@ -15909,29 +21355,29 @@ class DynamicsSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics + :vartype disable_metrics_collection: any + :ivar query: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). - :type query: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -15961,6 +21407,29 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: FetchXML is a proprietary query language that is used in Microsoft Dynamics + (online & on-premises). Type: string (or Expression with resultType string). + :paramtype query: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsSource' # type: str self.query = query @@ -15972,40 +21441,40 @@ class EloquaLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). - :type endpoint: any - :param username: Required. The site name and user name of your Eloqua account in the form: + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). + :vartype endpoint: any + :ivar username: Required. The site name and user name of your Eloqua account in the form: sitename/username. (i.e. Eloqua/Alice). - :type username: any - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype username: any + :ivar password: The password corresponding to the user name. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
- :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -16047,6 +21516,40 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). 
+ :paramtype endpoint: any + :keyword username: Required. The site name and user name of your Eloqua account in the form: + sitename/username. (i.e. Eloqua/Alice). + :paramtype username: any + :keyword password: The password corresponding to the user name. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Eloqua' # type: str self.endpoint = endpoint @@ -16063,30 +21566,30 @@ class EloquaObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -16121,6 +21624,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). 
+ :paramtype table_name: any + """ super(EloquaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'EloquaObject' # type: str self.table_name = table_name @@ -16131,32 +21658,32 @@ class EloquaSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -16188,6 +21715,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'EloquaSource' # type: str self.query = query @@ -16198,17 +21751,17 @@ class EncryptionConfiguration(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param key_name: Required. The name of the key in Azure Key Vault to use as Customer Managed + :ivar key_name: Required. The name of the key in Azure Key Vault to use as Customer Managed Key. - :type key_name: str - :param vault_base_url: Required. The url of the Azure Key Vault used for CMK. - :type vault_base_url: str - :param key_version: The version of the key used for CMK. If not provided, latest version will - be used. - :type key_version: str - :param identity: User assigned identity to use to authenticate to customer's key vault. If not + :vartype key_name: str + :ivar vault_base_url: Required. 
The url of the Azure Key Vault used for CMK. + :vartype vault_base_url: str + :ivar key_version: The version of the key used for CMK. If not provided, latest version will be + used. + :vartype key_version: str + :ivar identity: User assigned identity to use to authenticate to customer's key vault. If not provided Managed Service Identity will be used. - :type identity: ~azure.mgmt.datafactory.models.CMKIdentityDefinition + :vartype identity: ~azure.mgmt.datafactory.models.CMKIdentityDefinition """ _validation = { @@ -16232,6 +21785,19 @@ def __init__( identity: Optional["CMKIdentityDefinition"] = None, **kwargs ): + """ + :keyword key_name: Required. The name of the key in Azure Key Vault to use as Customer Managed + Key. + :paramtype key_name: str + :keyword vault_base_url: Required. The url of the Azure Key Vault used for CMK. + :paramtype vault_base_url: str + :keyword key_version: The version of the key used for CMK. If not provided, latest version will + be used. + :paramtype key_version: str + :keyword identity: User assigned identity to use to authenticate to customer's key vault. If + not provided Managed Service Identity will be used. + :paramtype identity: ~azure.mgmt.datafactory.models.CMKIdentityDefinition + """ super(EncryptionConfiguration, self).__init__(**kwargs) self.key_name = key_name self.vault_base_url = vault_base_url @@ -16242,11 +21808,11 @@ def __init__( class EntityReference(msrest.serialization.Model): """The entity reference. - :param type: The type of this referenced entity. Possible values include: + :ivar type: The type of this referenced entity. Possible values include: "IntegrationRuntimeReference", "LinkedServiceReference". - :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType - :param reference_name: The name of this referenced entity. 
- :type reference_name: str + :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :ivar reference_name: The name of this referenced entity. + :vartype reference_name: str """ _attribute_map = { @@ -16261,6 +21827,13 @@ def __init__( reference_name: Optional[str] = None, **kwargs ): + """ + :keyword type: The type of this referenced entity. Possible values include: + "IntegrationRuntimeReference", "LinkedServiceReference". + :paramtype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :keyword reference_name: The name of this referenced entity. + :paramtype reference_name: str + """ super(EntityReference, self).__init__(**kwargs) self.type = type self.reference_name = reference_name @@ -16271,12 +21844,12 @@ class EnvironmentVariableSetup(CustomSetupBase): All required parameters must be populated in order to send to Azure. - :param type: Required. The type of custom setup.Constant filled by server. - :type type: str - :param variable_name: Required. The name of the environment variable. - :type variable_name: str - :param variable_value: Required. The value of the environment variable. - :type variable_value: str + :ivar type: Required. The type of custom setup.Constant filled by server. + :vartype type: str + :ivar variable_name: Required. The name of the environment variable. + :vartype variable_name: str + :ivar variable_value: Required. The value of the environment variable. + :vartype variable_value: str """ _validation = { @@ -16298,6 +21871,12 @@ def __init__( variable_value: str, **kwargs ): + """ + :keyword variable_name: Required. The name of the environment variable. + :paramtype variable_name: str + :keyword variable_value: Required. The value of the environment variable. 
+ :paramtype variable_value: str + """ super(EnvironmentVariableSetup, self).__init__(**kwargs) self.type = 'EnvironmentVariableSetup' # type: str self.variable_name = variable_name @@ -16309,47 +21888,47 @@ class ExcelDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the excel storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param sheet_name: The sheet name of excel file. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar location: The location of the excel storage. + :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation + :ivar sheet_name: The sheet name of excel file. Type: string (or Expression with resultType string). - :type sheet_name: any - :param sheet_index: The sheet index of excel file and default value is 0. Type: integer (or + :vartype sheet_name: any + :ivar sheet_index: The sheet index of excel file and default value is 0. Type: integer (or Expression with resultType integer). - :type sheet_index: any - :param range: The partial data of one sheet. Type: string (or Expression with resultType + :vartype sheet_index: any + :ivar range: The partial data of one sheet. Type: string (or Expression with resultType string). - :type range: any - :param first_row_as_header: When used as input, treat the first row of data as headers. When + :vartype range: any + :ivar first_row_as_header: When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. 
Type: boolean (or Expression with resultType boolean). - :type first_row_as_header: any - :param compression: The data compression method used for the json dataset. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - :param null_value: The null value string. Type: string (or Expression with resultType string). - :type null_value: any + :vartype first_row_as_header: any + :ivar compression: The data compression method used for the json dataset. + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression + :ivar null_value: The null value string. Type: string (or Expression with resultType string). + :vartype null_value: any """ _validation = { @@ -16396,6 +21975,48 @@ def __init__( null_value: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword location: The location of the excel storage. + :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation + :keyword sheet_name: The sheet name of excel file. Type: string (or Expression with resultType + string). + :paramtype sheet_name: any + :keyword sheet_index: The sheet index of excel file and default value is 0. Type: integer (or + Expression with resultType integer). + :paramtype sheet_index: any + :keyword range: The partial data of one sheet. Type: string (or Expression with resultType + string). + :paramtype range: any + :keyword first_row_as_header: When used as input, treat the first row of data as headers. When + used as output,write the headers into the output as the first row of data. The default value is + false. Type: boolean (or Expression with resultType boolean). + :paramtype first_row_as_header: any + :keyword compression: The data compression method used for the json dataset. + :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + :keyword null_value: The null value string. Type: string (or Expression with resultType + string). + :paramtype null_value: any + """ super(ExcelDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'Excel' # type: str self.location = location @@ -16412,28 +22033,28 @@ class ExcelSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. 
- :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Excel store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar store_settings: Excel store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -16463,6 +22084,28 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: Excel store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(ExcelSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ExcelSource' # type: str self.store_settings = store_settings @@ -16474,41 +22117,41 @@ class ExecuteDataFlowActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param data_flow: Required. Data flow reference. - :type data_flow: ~azure.mgmt.datafactory.models.DataFlowReference - :param staging: Staging info for execute data flow activity. - :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo - :param integration_runtime: The integration runtime reference. - :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param compute: Compute properties for data flow activity. - :type compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute - :param trace_level: Trace level setting used for data flow monitoring output. Supported values + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. 
+ :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar data_flow: Required. Data flow reference. + :vartype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference + :ivar staging: Staging info for execute data flow activity. + :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :ivar integration_runtime: The integration runtime reference. + :vartype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar compute: Compute properties for data flow activity. + :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute + :ivar trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). - :type trace_level: any - :param continue_on_error: Continue on error setting used for data flow execution. Enables + :vartype trace_level: any + :ivar continue_on_error: Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). - :type continue_on_error: any - :param run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with + :vartype continue_on_error: any + :ivar run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). 
- :type run_concurrently: any + :vartype run_concurrently: any """ _validation = { @@ -16554,6 +22197,41 @@ def __init__( run_concurrently: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword data_flow: Required. Data flow reference. + :paramtype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference + :keyword staging: Staging info for execute data flow activity. + :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :keyword integration_runtime: The integration runtime reference. + :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword compute: Compute properties for data flow activity. + :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute + :keyword trace_level: Trace level setting used for data flow monitoring output. Supported + values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). + :paramtype trace_level: any + :keyword continue_on_error: Continue on error setting used for data flow execution. Enables + processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). 
+ :paramtype continue_on_error: any + :keyword run_concurrently: Concurrent run setting used for data flow execution. Allows sinks + with the same save order to be processed concurrently. Type: boolean (or Expression with + resultType boolean). + :paramtype run_concurrently: any + """ super(ExecuteDataFlowActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'ExecuteDataFlow' # type: str self.data_flow = data_flow @@ -16570,24 +22248,24 @@ class ExecuteDataFlowActivityTypeProperties(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param data_flow: Required. Data flow reference. - :type data_flow: ~azure.mgmt.datafactory.models.DataFlowReference - :param staging: Staging info for execute data flow activity. - :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo - :param integration_runtime: The integration runtime reference. - :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param compute: Compute properties for data flow activity. - :type compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute - :param trace_level: Trace level setting used for data flow monitoring output. Supported values + :ivar data_flow: Required. Data flow reference. + :vartype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference + :ivar staging: Staging info for execute data flow activity. + :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :ivar integration_runtime: The integration runtime reference. + :vartype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar compute: Compute properties for data flow activity. 
+ :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute + :ivar trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). - :type trace_level: any - :param continue_on_error: Continue on error setting used for data flow execution. Enables + :vartype trace_level: any + :ivar continue_on_error: Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). - :type continue_on_error: any - :param run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with + :vartype continue_on_error: any + :ivar run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). - :type run_concurrently: any + :vartype run_concurrently: any """ _validation = { @@ -16616,6 +22294,26 @@ def __init__( run_concurrently: Optional[Any] = None, **kwargs ): + """ + :keyword data_flow: Required. Data flow reference. + :paramtype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference + :keyword staging: Staging info for execute data flow activity. + :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :keyword integration_runtime: The integration runtime reference. + :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword compute: Compute properties for data flow activity. + :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute + :keyword trace_level: Trace level setting used for data flow monitoring output. Supported + values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). 
+ :paramtype trace_level: any + :keyword continue_on_error: Continue on error setting used for data flow execution. Enables + processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). + :paramtype continue_on_error: any + :keyword run_concurrently: Concurrent run setting used for data flow execution. Allows sinks + with the same save order to be processed concurrently. Type: boolean (or Expression with + resultType boolean). + :paramtype run_concurrently: any + """ super(ExecuteDataFlowActivityTypeProperties, self).__init__(**kwargs) self.data_flow = data_flow self.staging = staging @@ -16629,13 +22327,13 @@ def __init__( class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): """Compute properties for data flow activity. - :param compute_type: Compute type of the cluster which will execute data flow job. Possible + :ivar compute_type: Compute type of the cluster which will execute data flow job. Possible values include: 'General', 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression with resultType string). - :type compute_type: any - :param core_count: Core count of the cluster which will execute data flow job. Supported values + :vartype compute_type: any + :ivar core_count: Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. Type: integer (or Expression with resultType integer). - :type core_count: any + :vartype core_count: any """ _attribute_map = { @@ -16650,6 +22348,16 @@ def __init__( core_count: Optional[Any] = None, **kwargs ): + """ + :keyword compute_type: Compute type of the cluster which will execute data flow job. Possible + values include: 'General', 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression + with resultType string). + :paramtype compute_type: any + :keyword core_count: Core count of the cluster which will execute data flow job. Supported + values are: 8, 16, 32, 48, 80, 144 and 272. 
Type: integer (or Expression with resultType + integer). + :paramtype core_count: any + """ super(ExecuteDataFlowActivityTypePropertiesCompute, self).__init__(**kwargs) self.compute_type = compute_type self.core_count = core_count @@ -16660,26 +22368,26 @@ class ExecutePipelineActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param pipeline: Required. Pipeline reference. - :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference - :param parameters: Pipeline parameters. - :type parameters: dict[str, any] - :param wait_on_completion: Defines whether activity execution will wait for the dependent + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar pipeline: Required. Pipeline reference. 
+ :vartype pipeline: ~azure.mgmt.datafactory.models.PipelineReference + :ivar parameters: Pipeline parameters. + :vartype parameters: dict[str, any] + :ivar wait_on_completion: Defines whether activity execution will wait for the dependent pipeline execution to finish. Default is false. - :type wait_on_completion: bool + :vartype wait_on_completion: bool """ _validation = { @@ -16713,6 +22421,26 @@ def __init__( wait_on_completion: Optional[bool] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword pipeline: Required. Pipeline reference. + :paramtype pipeline: ~azure.mgmt.datafactory.models.PipelineReference + :keyword parameters: Pipeline parameters. + :paramtype parameters: dict[str, any] + :keyword wait_on_completion: Defines whether activity execution will wait for the dependent + pipeline execution to finish. Default is false. + :paramtype wait_on_completion: bool + """ super(ExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.type = 'ExecutePipeline' # type: str self.pipeline = pipeline @@ -16725,29 +22453,29 @@ class ExecutePowerQueryActivityTypeProperties(ExecuteDataFlowActivityTypePropert All required parameters must be populated in order to send to Azure. - :param data_flow: Required. Data flow reference. 
- :type data_flow: ~azure.mgmt.datafactory.models.DataFlowReference - :param staging: Staging info for execute data flow activity. - :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo - :param integration_runtime: The integration runtime reference. - :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param compute: Compute properties for data flow activity. - :type compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute - :param trace_level: Trace level setting used for data flow monitoring output. Supported values + :ivar data_flow: Required. Data flow reference. + :vartype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference + :ivar staging: Staging info for execute data flow activity. + :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :ivar integration_runtime: The integration runtime reference. + :vartype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar compute: Compute properties for data flow activity. + :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute + :ivar trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). - :type trace_level: any - :param continue_on_error: Continue on error setting used for data flow execution. Enables + :vartype trace_level: any + :ivar continue_on_error: Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). - :type continue_on_error: any - :param run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with + :vartype continue_on_error: any + :ivar run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. 
Type: boolean (or Expression with resultType boolean). - :type run_concurrently: any - :param sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to a + :vartype run_concurrently: any + :ivar sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to a queryName. - :type sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] - :param queries: List of mapping for Power Query mashup query to sink dataset(s). - :type queries: list[~azure.mgmt.datafactory.models.PowerQuerySinkMapping] + :vartype sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] + :ivar queries: List of mapping for Power Query mashup query to sink dataset(s). + :vartype queries: list[~azure.mgmt.datafactory.models.PowerQuerySinkMapping] """ _validation = { @@ -16780,6 +22508,31 @@ def __init__( queries: Optional[List["PowerQuerySinkMapping"]] = None, **kwargs ): + """ + :keyword data_flow: Required. Data flow reference. + :paramtype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference + :keyword staging: Staging info for execute data flow activity. + :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :keyword integration_runtime: The integration runtime reference. + :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword compute: Compute properties for data flow activity. + :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute + :keyword trace_level: Trace level setting used for data flow monitoring output. Supported + values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). + :paramtype trace_level: any + :keyword continue_on_error: Continue on error setting used for data flow execution. Enables + processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). 
+ :paramtype continue_on_error: any + :keyword run_concurrently: Concurrent run setting used for data flow execution. Allows sinks + with the same save order to be processed concurrently. Type: boolean (or Expression with + resultType boolean). + :paramtype run_concurrently: any + :keyword sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to + a queryName. + :paramtype sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] + :keyword queries: List of mapping for Power Query mashup query to sink dataset(s). + :paramtype queries: list[~azure.mgmt.datafactory.models.PowerQuerySinkMapping] + """ super(ExecutePowerQueryActivityTypeProperties, self).__init__(data_flow=data_flow, staging=staging, integration_runtime=integration_runtime, compute=compute, trace_level=trace_level, continue_on_error=continue_on_error, run_concurrently=run_concurrently, **kwargs) self.sinks = sinks self.queries = queries @@ -16790,54 +22543,54 @@ class ExecuteSSISPackageActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param package_location: Required. 
SSIS package location. - :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation - :param runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar package_location: Required. SSIS package location. + :vartype package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation + :ivar runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or "x64". Type: string (or Expression with resultType string). - :type runtime: any - :param logging_level: The logging level of SSIS package execution. Type: string (or Expression + :vartype runtime: any + :ivar logging_level: The logging level of SSIS package execution. Type: string (or Expression with resultType string). - :type logging_level: any - :param environment_path: The environment path to execute the SSIS package. Type: string (or + :vartype logging_level: any + :ivar environment_path: The environment path to execute the SSIS package. Type: string (or Expression with resultType string). - :type environment_path: any - :param execution_credential: The package execution credential. 
- :type execution_credential: ~azure.mgmt.datafactory.models.SSISExecutionCredential - :param connect_via: Required. The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param project_parameters: The project level parameters to execute the SSIS package. - :type project_parameters: dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter] - :param package_parameters: The package level parameters to execute the SSIS package. - :type package_parameters: dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter] - :param project_connection_managers: The project level connection managers to execute the SSIS + :vartype environment_path: any + :ivar execution_credential: The package execution credential. + :vartype execution_credential: ~azure.mgmt.datafactory.models.SSISExecutionCredential + :ivar connect_via: Required. The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar project_parameters: The project level parameters to execute the SSIS package. + :vartype project_parameters: dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter] + :ivar package_parameters: The package level parameters to execute the SSIS package. + :vartype package_parameters: dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter] + :ivar project_connection_managers: The project level connection managers to execute the SSIS package. - :type project_connection_managers: dict[str, dict[str, + :vartype project_connection_managers: dict[str, dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter]] - :param package_connection_managers: The package level connection managers to execute the SSIS + :ivar package_connection_managers: The package level connection managers to execute the SSIS package. 
- :type package_connection_managers: dict[str, dict[str, + :vartype package_connection_managers: dict[str, dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter]] - :param property_overrides: The property overrides to execute the SSIS package. - :type property_overrides: dict[str, ~azure.mgmt.datafactory.models.SSISPropertyOverride] - :param log_location: SSIS package execution log location. - :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation + :ivar property_overrides: The property overrides to execute the SSIS package. + :vartype property_overrides: dict[str, ~azure.mgmt.datafactory.models.SSISPropertyOverride] + :ivar log_location: SSIS package execution log location. + :vartype log_location: ~azure.mgmt.datafactory.models.SSISLogLocation """ _validation = { @@ -16894,6 +22647,54 @@ def __init__( log_location: Optional["SSISLogLocation"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword package_location: Required. SSIS package location. + :paramtype package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation + :keyword runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or + "x64". 
Type: string (or Expression with resultType string). + :paramtype runtime: any + :keyword logging_level: The logging level of SSIS package execution. Type: string (or + Expression with resultType string). + :paramtype logging_level: any + :keyword environment_path: The environment path to execute the SSIS package. Type: string (or + Expression with resultType string). + :paramtype environment_path: any + :keyword execution_credential: The package execution credential. + :paramtype execution_credential: ~azure.mgmt.datafactory.models.SSISExecutionCredential + :keyword connect_via: Required. The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword project_parameters: The project level parameters to execute the SSIS package. + :paramtype project_parameters: dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter] + :keyword package_parameters: The package level parameters to execute the SSIS package. + :paramtype package_parameters: dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter] + :keyword project_connection_managers: The project level connection managers to execute the SSIS + package. + :paramtype project_connection_managers: dict[str, dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter]] + :keyword package_connection_managers: The package level connection managers to execute the SSIS + package. + :paramtype package_connection_managers: dict[str, dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter]] + :keyword property_overrides: The property overrides to execute the SSIS package. + :paramtype property_overrides: dict[str, ~azure.mgmt.datafactory.models.SSISPropertyOverride] + :keyword log_location: SSIS package execution log location. 
+ :paramtype log_location: ~azure.mgmt.datafactory.models.SSISLogLocation + """ super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'ExecuteSSISPackage' # type: str self.package_location = package_location @@ -16915,44 +22716,44 @@ class ExecuteWranglingDataflowActivity(Activity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param data_flow: Required. Data flow reference. - :type data_flow: ~azure.mgmt.datafactory.models.DataFlowReference - :param staging: Staging info for execute data flow activity. - :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo - :param integration_runtime: The integration runtime reference. - :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param compute: Compute properties for data flow activity. - :type compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute - :param trace_level: Trace level setting used for data flow monitoring output. 
Supported values + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar data_flow: Required. Data flow reference. + :vartype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference + :ivar staging: Staging info for execute data flow activity. + :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :ivar integration_runtime: The integration runtime reference. + :vartype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar compute: Compute properties for data flow activity. + :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute + :ivar trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). - :type trace_level: any - :param continue_on_error: Continue on error setting used for data flow execution. Enables + :vartype trace_level: any + :ivar continue_on_error: Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). - :type continue_on_error: any - :param run_concurrently: Concurrent run setting used for data flow execution. 
Allows sinks with + :vartype continue_on_error: any + :ivar run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). - :type run_concurrently: any - :param sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to a + :vartype run_concurrently: any + :ivar sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to a queryName. - :type sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] - :param queries: List of mapping for Power Query mashup query to sink dataset(s). - :type queries: list[~azure.mgmt.datafactory.models.PowerQuerySinkMapping] + :vartype sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] + :ivar queries: List of mapping for Power Query mashup query to sink dataset(s). + :vartype queries: list[~azure.mgmt.datafactory.models.PowerQuerySinkMapping] """ _validation = { @@ -17000,6 +22801,44 @@ def __init__( queries: Optional[List["PowerQuerySinkMapping"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword data_flow: Required. Data flow reference. + :paramtype data_flow: ~azure.mgmt.datafactory.models.DataFlowReference + :keyword staging: Staging info for execute data flow activity. 
+ :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :keyword integration_runtime: The integration runtime reference. + :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword compute: Compute properties for data flow activity. + :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute + :keyword trace_level: Trace level setting used for data flow monitoring output. Supported + values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). + :paramtype trace_level: any + :keyword continue_on_error: Continue on error setting used for data flow execution. Enables + processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). + :paramtype continue_on_error: any + :keyword run_concurrently: Concurrent run setting used for data flow execution. Allows sinks + with the same save order to be processed concurrently. Type: boolean (or Expression with + resultType boolean). + :paramtype run_concurrently: any + :keyword sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to + a queryName. + :paramtype sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] + :keyword queries: List of mapping for Power Query mashup query to sink dataset(s). + :paramtype queries: list[~azure.mgmt.datafactory.models.PowerQuerySinkMapping] + """ super(ExecuteWranglingDataflowActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.type = 'ExecuteWranglingDataflow' # type: str self.policy = policy @@ -17019,8 +22858,8 @@ class ExposureControlBatchRequest(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param exposure_control_requests: Required. List of exposure control features. 
- :type exposure_control_requests: list[~azure.mgmt.datafactory.models.ExposureControlRequest] + :ivar exposure_control_requests: Required. List of exposure control features. + :vartype exposure_control_requests: list[~azure.mgmt.datafactory.models.ExposureControlRequest] """ _validation = { @@ -17037,6 +22876,11 @@ def __init__( exposure_control_requests: List["ExposureControlRequest"], **kwargs ): + """ + :keyword exposure_control_requests: Required. List of exposure control features. + :paramtype exposure_control_requests: + list[~azure.mgmt.datafactory.models.ExposureControlRequest] + """ super(ExposureControlBatchRequest, self).__init__(**kwargs) self.exposure_control_requests = exposure_control_requests @@ -17046,8 +22890,9 @@ class ExposureControlBatchResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param exposure_control_responses: Required. List of exposure control feature values. - :type exposure_control_responses: list[~azure.mgmt.datafactory.models.ExposureControlResponse] + :ivar exposure_control_responses: Required. List of exposure control feature values. + :vartype exposure_control_responses: + list[~azure.mgmt.datafactory.models.ExposureControlResponse] """ _validation = { @@ -17064,6 +22909,11 @@ def __init__( exposure_control_responses: List["ExposureControlResponse"], **kwargs ): + """ + :keyword exposure_control_responses: Required. List of exposure control feature values. + :paramtype exposure_control_responses: + list[~azure.mgmt.datafactory.models.ExposureControlResponse] + """ super(ExposureControlBatchResponse, self).__init__(**kwargs) self.exposure_control_responses = exposure_control_responses @@ -17071,10 +22921,10 @@ def __init__( class ExposureControlRequest(msrest.serialization.Model): """The exposure control request. - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. 
- :type feature_type: str + :ivar feature_name: The feature name. + :vartype feature_name: str + :ivar feature_type: The feature type. + :vartype feature_type: str """ _attribute_map = { @@ -17089,6 +22939,12 @@ def __init__( feature_type: Optional[str] = None, **kwargs ): + """ + :keyword feature_name: The feature name. + :paramtype feature_name: str + :keyword feature_type: The feature type. + :paramtype feature_type: str + """ super(ExposureControlRequest, self).__init__(**kwargs) self.feature_name = feature_name self.feature_type = feature_type @@ -17119,6 +22975,8 @@ def __init__( self, **kwargs ): + """ + """ super(ExposureControlResponse, self).__init__(**kwargs) self.feature_name = None self.value = None @@ -17133,8 +22991,8 @@ class Expression(msrest.serialization.Model): :ivar type: Expression type. Has constant value: "Expression". :vartype type: str - :param value: Required. Expression value. - :type value: str + :ivar value: Required. Expression value. + :vartype value: str """ _validation = { @@ -17155,6 +23013,10 @@ def __init__( value: str, **kwargs ): + """ + :keyword value: Required. Expression value. + :paramtype value: str + """ super(Expression, self).__init__(**kwargs) self.value = value @@ -17170,10 +23032,10 @@ class Resource(msrest.serialization.Model): :vartype name: str :ivar type: The resource type. :vartype type: str - :param location: The resource location. - :type location: str - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] + :ivar location: The resource location. + :vartype location: str + :ivar tags: A set of tags. The resource tags. + :vartype tags: dict[str, str] :ivar e_tag: Etag identifies change in the resource. :vartype e_tag: str """ @@ -17201,6 +23063,12 @@ def __init__( tags: Optional[Dict[str, str]] = None, **kwargs ): + """ + :keyword location: The resource location. + :paramtype location: str + :keyword tags: A set of tags. The resource tags. 
+ :paramtype tags: dict[str, str] + """ super(Resource, self).__init__(**kwargs) self.id = None self.name = None @@ -17221,32 +23089,33 @@ class Factory(Resource): :vartype name: str :ivar type: The resource type. :vartype type: str - :param location: The resource location. - :type location: str - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] + :ivar location: The resource location. + :vartype location: str + :ivar tags: A set of tags. The resource tags. + :vartype tags: dict[str, str] :ivar e_tag: Etag identifies change in the resource. :vartype e_tag: str - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param identity: Managed service identity of the factory. - :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + :vartype additional_properties: dict[str, any] + :ivar identity: Managed service identity of the factory. + :vartype identity: ~azure.mgmt.datafactory.models.FactoryIdentity :ivar provisioning_state: Factory provisioning state, example Succeeded. :vartype provisioning_state: str :ivar create_time: Time the factory was created in ISO8601 format. :vartype create_time: ~datetime.datetime :ivar version: Version of the factory. :vartype version: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration - :param global_parameters: List of parameters for factory. - :type global_parameters: dict[str, ~azure.mgmt.datafactory.models.GlobalParameterSpecification] - :param encryption: Properties to enable Customer Managed Key for the factory. 
- :type encryption: ~azure.mgmt.datafactory.models.EncryptionConfiguration - :param public_network_access: Whether or not public network access is allowed for the data + :ivar repo_configuration: Git repo information of the factory. + :vartype repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + :ivar global_parameters: List of parameters for factory. + :vartype global_parameters: dict[str, + ~azure.mgmt.datafactory.models.GlobalParameterSpecification] + :ivar encryption: Properties to enable Customer Managed Key for the factory. + :vartype encryption: ~azure.mgmt.datafactory.models.EncryptionConfiguration + :ivar public_network_access: Whether or not public network access is allowed for the data factory. Possible values include: "Enabled", "Disabled". - :type public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess + :vartype public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess """ _validation = { @@ -17290,6 +23159,27 @@ def __init__( public_network_access: Optional[Union[str, "PublicNetworkAccess"]] = None, **kwargs ): + """ + :keyword location: The resource location. + :paramtype location: str + :keyword tags: A set of tags. The resource tags. + :paramtype tags: dict[str, str] + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword identity: Managed service identity of the factory. + :paramtype identity: ~azure.mgmt.datafactory.models.FactoryIdentity + :keyword repo_configuration: Git repo information of the factory. + :paramtype repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + :keyword global_parameters: List of parameters for factory. + :paramtype global_parameters: dict[str, + ~azure.mgmt.datafactory.models.GlobalParameterSpecification] + :keyword encryption: Properties to enable Customer Managed Key for the factory. 
+ :paramtype encryption: ~azure.mgmt.datafactory.models.EncryptionConfiguration + :keyword public_network_access: Whether or not public network access is allowed for the data + factory. Possible values include: "Enabled", "Disabled". + :paramtype public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess + """ super(Factory, self).__init__(location=location, tags=tags, **kwargs) self.additional_properties = additional_properties self.identity = identity @@ -17310,18 +23200,18 @@ class FactoryRepoConfiguration(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of repo configuration.Constant filled by server. - :type type: str - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str + :ivar type: Required. Type of repo configuration.Constant filled by server. + :vartype type: str + :ivar account_name: Required. Account name. + :vartype account_name: str + :ivar repository_name: Required. Repository name. + :vartype repository_name: str + :ivar collaboration_branch: Required. Collaboration branch. + :vartype collaboration_branch: str + :ivar root_folder: Required. Root folder. + :vartype root_folder: str + :ivar last_commit_id: Last commit id. + :vartype last_commit_id: str """ _validation = { @@ -17355,6 +23245,18 @@ def __init__( last_commit_id: Optional[str] = None, **kwargs ): + """ + :keyword account_name: Required. Account name. + :paramtype account_name: str + :keyword repository_name: Required. Repository name. + :paramtype repository_name: str + :keyword collaboration_branch: Required. Collaboration branch. 
+ :paramtype collaboration_branch: str + :keyword root_folder: Required. Root folder. + :paramtype root_folder: str + :keyword last_commit_id: Last commit id. + :paramtype last_commit_id: str + """ super(FactoryRepoConfiguration, self).__init__(**kwargs) self.type = None # type: Optional[str] self.account_name = account_name @@ -17369,24 +23271,24 @@ class FactoryGitHubConfiguration(FactoryRepoConfiguration): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of repo configuration.Constant filled by server. - :type type: str - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param host_name: GitHub Enterprise host name. For example: https://github.mydomain.com. - :type host_name: str - :param client_id: GitHub bring your own app client id. - :type client_id: str - :param client_secret: GitHub bring your own app client secret information. - :type client_secret: ~azure.mgmt.datafactory.models.GitHubClientSecret + :ivar type: Required. Type of repo configuration.Constant filled by server. + :vartype type: str + :ivar account_name: Required. Account name. + :vartype account_name: str + :ivar repository_name: Required. Repository name. + :vartype repository_name: str + :ivar collaboration_branch: Required. Collaboration branch. + :vartype collaboration_branch: str + :ivar root_folder: Required. Root folder. + :vartype root_folder: str + :ivar last_commit_id: Last commit id. + :vartype last_commit_id: str + :ivar host_name: GitHub Enterprise host name. For example: https://github.mydomain.com. + :vartype host_name: str + :ivar client_id: GitHub bring your own app client id. 
+ :vartype client_id: str + :ivar client_secret: GitHub bring your own app client secret information. + :vartype client_secret: ~azure.mgmt.datafactory.models.GitHubClientSecret """ _validation = { @@ -17422,6 +23324,24 @@ def __init__( client_secret: Optional["GitHubClientSecret"] = None, **kwargs ): + """ + :keyword account_name: Required. Account name. + :paramtype account_name: str + :keyword repository_name: Required. Repository name. + :paramtype repository_name: str + :keyword collaboration_branch: Required. Collaboration branch. + :paramtype collaboration_branch: str + :keyword root_folder: Required. Root folder. + :paramtype root_folder: str + :keyword last_commit_id: Last commit id. + :paramtype last_commit_id: str + :keyword host_name: GitHub Enterprise host name. For example: https://github.mydomain.com. + :paramtype host_name: str + :keyword client_id: GitHub bring your own app client id. + :paramtype client_id: str + :keyword client_secret: GitHub bring your own app client secret information. + :paramtype client_secret: ~azure.mgmt.datafactory.models.GitHubClientSecret + """ super(FactoryGitHubConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) self.type = 'FactoryGitHubConfiguration' # type: str self.host_name = host_name @@ -17436,15 +23356,15 @@ class FactoryIdentity(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. The identity type. Possible values include: "SystemAssigned", + :ivar type: Required. The identity type. Possible values include: "SystemAssigned", "UserAssigned", "SystemAssigned,UserAssigned". - :type type: str or ~azure.mgmt.datafactory.models.FactoryIdentityType + :vartype type: str or ~azure.mgmt.datafactory.models.FactoryIdentityType :ivar principal_id: The principal id of the identity. 
:vartype principal_id: str :ivar tenant_id: The client tenant id of the identity. :vartype tenant_id: str - :param user_assigned_identities: List of user assigned identities for the factory. - :type user_assigned_identities: dict[str, any] + :ivar user_assigned_identities: List of user assigned identities for the factory. + :vartype user_assigned_identities: dict[str, any] """ _validation = { @@ -17467,6 +23387,13 @@ def __init__( user_assigned_identities: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword type: Required. The identity type. Possible values include: "SystemAssigned", + "UserAssigned", "SystemAssigned,UserAssigned". + :paramtype type: str or ~azure.mgmt.datafactory.models.FactoryIdentityType + :keyword user_assigned_identities: List of user assigned identities for the factory. + :paramtype user_assigned_identities: dict[str, any] + """ super(FactoryIdentity, self).__init__(**kwargs) self.type = type self.principal_id = None @@ -17479,10 +23406,10 @@ class FactoryListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of factories. - :type value: list[~azure.mgmt.datafactory.models.Factory] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of factories. + :vartype value: list[~azure.mgmt.datafactory.models.Factory] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -17501,6 +23428,12 @@ def __init__( next_link: Optional[str] = None, **kwargs ): + """ + :keyword value: Required. List of factories. + :paramtype value: list[~azure.mgmt.datafactory.models.Factory] + :keyword next_link: The link to the next page of results, if any remaining results exist. 
+ :paramtype next_link: str + """ super(FactoryListResponse, self).__init__(**kwargs) self.value = value self.next_link = next_link @@ -17509,10 +23442,10 @@ def __init__( class FactoryRepoUpdate(msrest.serialization.Model): """Factory's git repo information. - :param factory_resource_id: The factory resource id. - :type factory_resource_id: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + :ivar factory_resource_id: The factory resource id. + :vartype factory_resource_id: str + :ivar repo_configuration: Git repo information of the factory. + :vartype repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration """ _attribute_map = { @@ -17527,6 +23460,12 @@ def __init__( repo_configuration: Optional["FactoryRepoConfiguration"] = None, **kwargs ): + """ + :keyword factory_resource_id: The factory resource id. + :paramtype factory_resource_id: str + :keyword repo_configuration: Git repo information of the factory. + :paramtype repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + """ super(FactoryRepoUpdate, self).__init__(**kwargs) self.factory_resource_id = factory_resource_id self.repo_configuration = repo_configuration @@ -17535,13 +23474,13 @@ def __init__( class FactoryUpdateParameters(msrest.serialization.Model): """Parameters for updating a factory resource. - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity - :param public_network_access: Whether or not public network access is allowed for the data + :ivar tags: A set of tags. The resource tags. + :vartype tags: dict[str, str] + :ivar identity: Managed service identity of the factory. 
+ :vartype identity: ~azure.mgmt.datafactory.models.FactoryIdentity + :ivar public_network_access: Whether or not public network access is allowed for the data factory. Possible values include: "Enabled", "Disabled". - :type public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess + :vartype public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess """ _attribute_map = { @@ -17558,6 +23497,15 @@ def __init__( public_network_access: Optional[Union[str, "PublicNetworkAccess"]] = None, **kwargs ): + """ + :keyword tags: A set of tags. The resource tags. + :paramtype tags: dict[str, str] + :keyword identity: Managed service identity of the factory. + :paramtype identity: ~azure.mgmt.datafactory.models.FactoryIdentity + :keyword public_network_access: Whether or not public network access is allowed for the data + factory. Possible values include: "Enabled", "Disabled". + :paramtype public_network_access: str or ~azure.mgmt.datafactory.models.PublicNetworkAccess + """ super(FactoryUpdateParameters, self).__init__(**kwargs) self.tags = tags self.identity = identity @@ -17569,22 +23517,22 @@ class FactoryVSTSConfiguration(FactoryRepoConfiguration): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of repo configuration.Constant filled by server. - :type type: str - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param project_name: Required. VSTS project name. - :type project_name: str - :param tenant_id: VSTS tenant id. - :type tenant_id: str + :ivar type: Required. Type of repo configuration.Constant filled by server. 
+ :vartype type: str + :ivar account_name: Required. Account name. + :vartype account_name: str + :ivar repository_name: Required. Repository name. + :vartype repository_name: str + :ivar collaboration_branch: Required. Collaboration branch. + :vartype collaboration_branch: str + :ivar root_folder: Required. Root folder. + :vartype root_folder: str + :ivar last_commit_id: Last commit id. + :vartype last_commit_id: str + :ivar project_name: Required. VSTS project name. + :vartype project_name: str + :ivar tenant_id: VSTS tenant id. + :vartype tenant_id: str """ _validation = { @@ -17619,6 +23567,22 @@ def __init__( tenant_id: Optional[str] = None, **kwargs ): + """ + :keyword account_name: Required. Account name. + :paramtype account_name: str + :keyword repository_name: Required. Repository name. + :paramtype repository_name: str + :keyword collaboration_branch: Required. Collaboration branch. + :paramtype collaboration_branch: str + :keyword root_folder: Required. Root folder. + :paramtype root_folder: str + :keyword last_commit_id: Last commit id. + :paramtype last_commit_id: str + :keyword project_name: Required. VSTS project name. + :paramtype project_name: str + :keyword tenant_id: VSTS tenant id. + :paramtype tenant_id: str + """ super(FactoryVSTSConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) self.type = 'FactoryVSTSConfiguration' # type: str self.project_name = project_name @@ -17630,31 +23594,31 @@ class FileServerLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. 
Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. Host name of the server. Type: string (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. Host name of the server. Type: string (or Expression with resultType string). - :type host: any - :param user_id: User ID to logon the server. Type: string (or Expression with resultType + :vartype host: any + :ivar user_id: User ID to logon the server. Type: string (or Expression with resultType string). - :type user_id: any - :param password: Password to logon the server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype user_id: any + :ivar password: Password to logon the server. 
+ :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -17689,6 +23653,31 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. Host name of the server. Type: string (or Expression with resultType + string). + :paramtype host: any + :keyword user_id: User ID to logon the server. Type: string (or Expression with resultType + string). + :paramtype user_id: any + :keyword password: Password to logon the server. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'FileServer' # type: str self.host = host @@ -17702,17 +23691,17 @@ class FileServerLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any + :vartype file_name: any """ _validation = { @@ -17734,6 +23723,17 @@ def __init__( file_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). 
+ :paramtype file_name: any + """ super(FileServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.type = 'FileServerLocation' # type: str @@ -17743,47 +23743,47 @@ class FileServerReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string (or Expression with + :vartype recursive: any + :ivar wildcard_folder_path: FileServer wildcardFolderPath. 
Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype wildcard_file_name: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype file_list_path: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. 
Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any - :param file_filter: Specify a filter to be used to select a subset of files in the folderPath + :vartype modified_datetime_end: any + :ivar file_filter: Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). - :type file_filter: any + :vartype file_filter: any """ _validation = { @@ -17825,6 +23825,47 @@ def __init__( file_filter: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: FileServer wildcardFolderPath. Type: string (or Expression with + resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with + resultType string). + :paramtype wildcard_file_name: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). 
+ :paramtype file_list_path: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + :keyword file_filter: Specify a filter to be used to select a subset of files in the folderPath + rather than all files. Type: string (or Expression with resultType string). + :paramtype file_filter: any + """ super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FileServerReadSettings' # type: str self.recursive = recursive @@ -17844,19 +23885,19 @@ class FileServerWriteSettings(StoreWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. 
- :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any """ _validation = { @@ -17880,6 +23921,19 @@ def __init__( copy_behavior: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. 
+ :paramtype copy_behavior: any + """ super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'FileServerWriteSettings' # type: str @@ -17889,47 +23943,47 @@ class FileShareDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. 
- :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param folder_path: The path of the on-premises file system. Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar folder_path: The path of the on-premises file system. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: The name of the on-premises file system. Type: string (or Expression with + :vartype folder_path: any + :ivar file_name: The name of the on-premises file system. Type: string (or Expression with resultType string). - :type file_name: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype file_name: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any - :param format: The format of the files. 
- :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param file_filter: Specify a filter to be used to select a subset of files in the folderPath + :vartype modified_datetime_end: any + :ivar format: The format of the files. + :vartype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :ivar file_filter: Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). - :type file_filter: any - :param compression: The data compression method used for the file system. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + :vartype file_filter: any + :ivar compression: The data compression method used for the file system. + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -17976,6 +24030,47 @@ def __init__( compression: Optional["DatasetCompression"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. 
+ :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword folder_path: The path of the on-premises file system. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: The name of the on-premises file system. Type: string (or Expression with + resultType string). + :paramtype file_name: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + :keyword format: The format of the files. + :paramtype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :keyword file_filter: Specify a filter to be used to select a subset of files in the folderPath + rather than all files. Type: string (or Expression with resultType string). + :paramtype file_filter: any + :keyword compression: The data compression method used for the file system. + :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ super(FileShareDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'FileShare' # type: str self.folder_path = folder_path @@ -17992,31 +24087,31 @@ class FileSystemSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any """ _validation = { @@ -18048,6 +24143,31 @@ def __init__( copy_behavior: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. 
+ :paramtype copy_behavior: any + """ super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FileSystemSink' # type: str self.copy_behavior = copy_behavior @@ -18058,29 +24178,29 @@ class FileSystemSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype recursive: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -18110,6 +24230,29 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. 
Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FileSystemSource' # type: str self.recursive = recursive @@ -18121,23 +24264,23 @@ class FilterActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param items: Required. Input array on which filter should be applied. - :type items: ~azure.mgmt.datafactory.models.Expression - :param condition: Required. Condition to be used for filtering the input. 
- :type condition: ~azure.mgmt.datafactory.models.Expression + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar items: Required. Input array on which filter should be applied. + :vartype items: ~azure.mgmt.datafactory.models.Expression + :ivar condition: Required. Condition to be used for filtering the input. + :vartype condition: ~azure.mgmt.datafactory.models.Expression """ _validation = { @@ -18170,6 +24313,23 @@ def __init__( user_properties: Optional[List["UserProperty"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword items: Required. Input array on which filter should be applied. + :paramtype items: ~azure.mgmt.datafactory.models.Expression + :keyword condition: Required. Condition to be used for filtering the input. 
+ :paramtype condition: ~azure.mgmt.datafactory.models.Expression + """ super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.type = 'Filter' # type: str self.items = items @@ -18181,25 +24341,25 @@ class Flowlet(DataFlow): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of data flow.Constant filled by server. - :type type: str - :param description: The description of the data flow. - :type description: str - :param annotations: List of tags that can be used for describing the data flow. - :type annotations: list[any] - :param folder: The folder that this data flow is in. If not specified, Data flow will appear at + :ivar type: Required. Type of data flow.Constant filled by server. + :vartype type: str + :ivar description: The description of the data flow. + :vartype description: str + :ivar annotations: List of tags that can be used for describing the data flow. + :vartype annotations: list[any] + :ivar folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder - :param sources: List of sources in Flowlet. - :type sources: list[~azure.mgmt.datafactory.models.DataFlowSource] - :param sinks: List of sinks in Flowlet. - :type sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] - :param transformations: List of transformations in Flowlet. - :type transformations: list[~azure.mgmt.datafactory.models.Transformation] - :param script: Flowlet script. - :type script: str - :param script_lines: Flowlet script lines. - :type script_lines: list[str] + :vartype folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :ivar sources: List of sources in Flowlet. + :vartype sources: list[~azure.mgmt.datafactory.models.DataFlowSource] + :ivar sinks: List of sinks in Flowlet. 
+ :vartype sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] + :ivar transformations: List of transformations in Flowlet. + :vartype transformations: list[~azure.mgmt.datafactory.models.Transformation] + :ivar script: Flowlet script. + :vartype script: str + :ivar script_lines: Flowlet script lines. + :vartype script_lines: list[str] """ _validation = { @@ -18231,6 +24391,25 @@ def __init__( script_lines: Optional[List[str]] = None, **kwargs ): + """ + :keyword description: The description of the data flow. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the data flow. + :paramtype annotations: list[any] + :keyword folder: The folder that this data flow is in. If not specified, Data flow will appear + at the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :keyword sources: List of sources in Flowlet. + :paramtype sources: list[~azure.mgmt.datafactory.models.DataFlowSource] + :keyword sinks: List of sinks in Flowlet. + :paramtype sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] + :keyword transformations: List of transformations in Flowlet. + :paramtype transformations: list[~azure.mgmt.datafactory.models.Transformation] + :keyword script: Flowlet script. + :paramtype script: str + :keyword script_lines: Flowlet script lines. + :paramtype script_lines: list[str] + """ super(Flowlet, self).__init__(description=description, annotations=annotations, folder=folder, **kwargs) self.type = 'Flowlet' # type: str self.sources = sources @@ -18245,28 +24424,28 @@ class ForEachActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. 
- :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param is_sequential: Should the loop be executed in sequence or in parallel (max 50). - :type is_sequential: bool - :param batch_count: Batch count to be used for controlling the number of parallel execution + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar is_sequential: Should the loop be executed in sequence or in parallel (max 50). + :vartype is_sequential: bool + :ivar batch_count: Batch count to be used for controlling the number of parallel execution (when isSequential is set to false). - :type batch_count: int - :param items: Required. Collection to iterate. - :type items: ~azure.mgmt.datafactory.models.Expression - :param activities: Required. List of activities to execute . - :type activities: list[~azure.mgmt.datafactory.models.Activity] + :vartype batch_count: int + :ivar items: Required. Collection to iterate. + :vartype items: ~azure.mgmt.datafactory.models.Expression + :ivar activities: Required. List of activities to execute . 
+ :vartype activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { @@ -18304,6 +24483,28 @@ def __init__( batch_count: Optional[int] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword is_sequential: Should the loop be executed in sequence or in parallel (max 50). + :paramtype is_sequential: bool + :keyword batch_count: Batch count to be used for controlling the number of parallel execution + (when isSequential is set to false). + :paramtype batch_count: int + :keyword items: Required. Collection to iterate. + :paramtype items: ~azure.mgmt.datafactory.models.Expression + :keyword activities: Required. List of activities to execute . + :paramtype activities: list[~azure.mgmt.datafactory.models.Activity] + """ super(ForEachActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.type = 'ForEach' # type: str self.is_sequential = is_sequential @@ -18317,43 +24518,43 @@ class FtpReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. 
The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or Expression with - resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType + :vartype recursive: any + :ivar wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType + string). 
+ :vartype wildcard_file_name: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype delete_files_after_completion: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. - :type use_binary_transfer: bool - :param disable_chunking: If true, disable parallel reading within each file. Default is false. + :vartype file_list_path: any + :ivar use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. + :vartype use_binary_transfer: bool + :ivar disable_chunking: If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_chunking: any + :vartype disable_chunking: any """ _validation = { @@ -18393,6 +24594,43 @@ def __init__( disable_chunking: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or Expression with + resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType + string). + :paramtype wildcard_file_name: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. + :paramtype use_binary_transfer: bool + :keyword disable_chunking: If true, disable parallel reading within each file. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :paramtype disable_chunking: any + """ super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FtpReadSettings' # type: str self.recursive = recursive @@ -18411,44 +24649,44 @@ class FtpServerLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. Host name of the FTP server. Type: string (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. 
+ :vartype annotations: list[any] + :ivar host: Required. Host name of the FTP server. Type: string (or Expression with resultType string). - :type host: any - :param port: The TCP port number that the FTP server uses to listen for client connections. + :vartype host: any + :ivar port: The TCP port number that the FTP server uses to listen for client connections. Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: any - :param authentication_type: The authentication type to be used to connect to the FTP server. + :vartype port: any + :ivar authentication_type: The authentication type to be used to connect to the FTP server. Possible values include: "Basic", "Anonymous". - :type authentication_type: str or ~azure.mgmt.datafactory.models.FtpAuthenticationType - :param user_name: Username to logon the FTP server. Type: string (or Expression with resultType + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.FtpAuthenticationType + :ivar user_name: Username to logon the FTP server. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password to logon the FTP server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype user_name: any + :ivar password: Password to logon the FTP server. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is + :vartype encrypted_credential: any + :ivar enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is true. 
Type: boolean (or Expression with resultType boolean). - :type enable_ssl: any - :param enable_server_certificate_validation: If true, validate the FTP server SSL certificate + :vartype enable_ssl: any + :ivar enable_server_certificate_validation: If true, validate the FTP server SSL certificate when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: any + :vartype enable_server_certificate_validation: any """ _validation = { @@ -18491,6 +24729,44 @@ def __init__( enable_server_certificate_validation: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. Host name of the FTP server. Type: string (or Expression with + resultType string). + :paramtype host: any + :keyword port: The TCP port number that the FTP server uses to listen for client connections. + Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. + :paramtype port: any + :keyword authentication_type: The authentication type to be used to connect to the FTP server. + Possible values include: "Basic", "Anonymous". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.FtpAuthenticationType + :keyword user_name: Username to logon the FTP server. 
Type: string (or Expression with + resultType string). + :paramtype user_name: any + :keyword password: Password to logon the FTP server. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is + true. Type: boolean (or Expression with resultType boolean). + :paramtype enable_ssl: any + :keyword enable_server_certificate_validation: If true, validate the FTP server SSL certificate + when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with + resultType boolean). + :paramtype enable_server_certificate_validation: any + """ super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'FtpServer' # type: str self.host = host @@ -18508,17 +24784,17 @@ class FtpServerLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. 
Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any + :vartype file_name: any """ _validation = { @@ -18540,6 +24816,17 @@ def __init__( file_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + """ super(FtpServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.type = 'FtpServerLocation' # type: str @@ -18547,11 +24834,11 @@ def __init__( class GetDataFactoryOperationStatusResponse(msrest.serialization.Model): """Response body structure for get data factory operation status. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param status: Status of the operation. - :type status: str + :vartype additional_properties: dict[str, any] + :ivar status: Status of the operation. + :vartype status: str """ _attribute_map = { @@ -18566,6 +24853,13 @@ def __init__( status: Optional[str] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword status: Status of the operation. + :paramtype status: str + """ super(GetDataFactoryOperationStatusResponse, self).__init__(**kwargs) self.additional_properties = additional_properties self.status = status @@ -18576,31 +24870,31 @@ class GetMetadataActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param dataset: Required. GetMetadata activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param field_list: Fields of metadata to get from dataset. - :type field_list: list[any] - :param store_settings: GetMetadata activity store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: GetMetadata activity format settings. - :type format_settings: ~azure.mgmt.datafactory.models.FormatReadSettings + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. 
Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar dataset: Required. GetMetadata activity dataset reference. + :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :ivar field_list: Fields of metadata to get from dataset. + :vartype field_list: list[any] + :ivar store_settings: GetMetadata activity store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar format_settings: GetMetadata activity format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.FormatReadSettings """ _validation = { @@ -18640,6 +24934,31 @@ def __init__( format_settings: Optional["FormatReadSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword dataset: Required. GetMetadata activity dataset reference. + :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :keyword field_list: Fields of metadata to get from dataset. + :paramtype field_list: list[any] + :keyword store_settings: GetMetadata activity store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword format_settings: GetMetadata activity format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.FormatReadSettings + """ super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'GetMetadata' # type: str self.dataset = dataset @@ -18651,8 +24970,8 @@ def __init__( class GetSsisObjectMetadataRequest(msrest.serialization.Model): """The request payload of get SSIS object metadata. - :param metadata_path: Metadata path. - :type metadata_path: str + :ivar metadata_path: Metadata path. + :vartype metadata_path: str """ _attribute_map = { @@ -18665,6 +24984,10 @@ def __init__( metadata_path: Optional[str] = None, **kwargs ): + """ + :keyword metadata_path: Metadata path. + :paramtype metadata_path: str + """ super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) self.metadata_path = metadata_path @@ -18674,14 +24997,14 @@ class GitHubAccessTokenRequest(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param git_hub_access_code: Required. GitHub access code. - :type git_hub_access_code: str - :param git_hub_client_id: GitHub application client ID. 
- :type git_hub_client_id: str - :param git_hub_client_secret: GitHub bring your own app client secret information. - :type git_hub_client_secret: ~azure.mgmt.datafactory.models.GitHubClientSecret - :param git_hub_access_token_base_url: Required. GitHub access token base URL. - :type git_hub_access_token_base_url: str + :ivar git_hub_access_code: Required. GitHub access code. + :vartype git_hub_access_code: str + :ivar git_hub_client_id: GitHub application client ID. + :vartype git_hub_client_id: str + :ivar git_hub_client_secret: GitHub bring your own app client secret information. + :vartype git_hub_client_secret: ~azure.mgmt.datafactory.models.GitHubClientSecret + :ivar git_hub_access_token_base_url: Required. GitHub access token base URL. + :vartype git_hub_access_token_base_url: str """ _validation = { @@ -18705,6 +25028,16 @@ def __init__( git_hub_client_secret: Optional["GitHubClientSecret"] = None, **kwargs ): + """ + :keyword git_hub_access_code: Required. GitHub access code. + :paramtype git_hub_access_code: str + :keyword git_hub_client_id: GitHub application client ID. + :paramtype git_hub_client_id: str + :keyword git_hub_client_secret: GitHub bring your own app client secret information. + :paramtype git_hub_client_secret: ~azure.mgmt.datafactory.models.GitHubClientSecret + :keyword git_hub_access_token_base_url: Required. GitHub access token base URL. + :paramtype git_hub_access_token_base_url: str + """ super(GitHubAccessTokenRequest, self).__init__(**kwargs) self.git_hub_access_code = git_hub_access_code self.git_hub_client_id = git_hub_client_id @@ -18715,8 +25048,8 @@ def __init__( class GitHubAccessTokenResponse(msrest.serialization.Model): """Get GitHub access token response definition. - :param git_hub_access_token: GitHub access token. - :type git_hub_access_token: str + :ivar git_hub_access_token: GitHub access token. 
+ :vartype git_hub_access_token: str """ _attribute_map = { @@ -18729,6 +25062,10 @@ def __init__( git_hub_access_token: Optional[str] = None, **kwargs ): + """ + :keyword git_hub_access_token: GitHub access token. + :paramtype git_hub_access_token: str + """ super(GitHubAccessTokenResponse, self).__init__(**kwargs) self.git_hub_access_token = git_hub_access_token @@ -18736,10 +25073,10 @@ def __init__( class GitHubClientSecret(msrest.serialization.Model): """Client secret information for factory's bring your own app repository configuration. - :param byoa_secret_akv_url: Bring your own app client secret AKV URL. - :type byoa_secret_akv_url: str - :param byoa_secret_name: Bring your own app client secret name in AKV. - :type byoa_secret_name: str + :ivar byoa_secret_akv_url: Bring your own app client secret AKV URL. + :vartype byoa_secret_akv_url: str + :ivar byoa_secret_name: Bring your own app client secret name in AKV. + :vartype byoa_secret_name: str """ _attribute_map = { @@ -18754,6 +25091,12 @@ def __init__( byoa_secret_name: Optional[str] = None, **kwargs ): + """ + :keyword byoa_secret_akv_url: Bring your own app client secret AKV URL. + :paramtype byoa_secret_akv_url: str + :keyword byoa_secret_name: Bring your own app client secret name in AKV. + :paramtype byoa_secret_name: str + """ super(GitHubClientSecret, self).__init__(**kwargs) self.byoa_secret_akv_url = byoa_secret_akv_url self.byoa_secret_name = byoa_secret_name @@ -18764,11 +25107,11 @@ class GlobalParameterSpecification(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Global Parameter type. Possible values include: "Object", "String", + :ivar type: Required. Global Parameter type. Possible values include: "Object", "String", "Int", "Float", "Bool", "Array". - :type type: str or ~azure.mgmt.datafactory.models.GlobalParameterType - :param value: Required. Value of parameter. 
- :type value: any + :vartype type: str or ~azure.mgmt.datafactory.models.GlobalParameterType + :ivar value: Required. Value of parameter. + :vartype value: any """ _validation = { @@ -18788,6 +25131,13 @@ def __init__( value: Any, **kwargs ): + """ + :keyword type: Required. Global Parameter type. Possible values include: "Object", "String", + "Int", "Float", "Bool", "Array". + :paramtype type: str or ~azure.mgmt.datafactory.models.GlobalParameterType + :keyword value: Required. Value of parameter. + :paramtype value: any + """ super(GlobalParameterSpecification, self).__init__(**kwargs) self.type = type self.value = value @@ -18798,56 +25148,56 @@ class GoogleAdWordsLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param client_customer_id: Required. The Client customer ID of the AdWords account that you - want to fetch report data for. - :type client_customer_id: any - :param developer_token: Required. The developer token associated with the manager account that + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. 
+ :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar client_customer_id: Required. The Client customer ID of the AdWords account that you want + to fetch report data for. + :vartype client_customer_id: any + :ivar developer_token: Required. The developer token associated with the manager account that you use to grant access to the AdWords API. - :type developer_token: ~azure.mgmt.datafactory.models.SecretBase - :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for + :vartype developer_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar authentication_type: Required. The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values include: "ServiceAuthentication", "UserAuthentication". - :type authentication_type: str or + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType - :param refresh_token: The refresh token obtained from Google for authorizing access to AdWords + :ivar refresh_token: The refresh token obtained from Google for authorizing access to AdWords for UserAuthentication. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param client_id: The client id of the google application used to acquire the refresh token. + :vartype refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar client_id: The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). 
- :type client_id: any - :param client_secret: The client secret of the google application used to acquire the refresh + :vartype client_id: any + :ivar client_secret: The client secret of the google application used to acquire the refresh token. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param email: The service account email ID that is used for ServiceAuthentication and can only + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar email: The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. - :type email: any - :param key_file_path: The full path to the .p12 key file that is used to authenticate the + :vartype email: any + :ivar key_file_path: The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. - :type key_file_path: any - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + :vartype key_file_path: any + :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: any - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + :vartype trusted_cert_path: any + :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_system_trust_store: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -18898,6 +25248,56 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword client_customer_id: Required. The Client customer ID of the AdWords account that you + want to fetch report data for. + :paramtype client_customer_id: any + :keyword developer_token: Required. The developer token associated with the manager account + that you use to grant access to the AdWords API. + :paramtype developer_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword authentication_type: Required. The OAuth 2.0 authentication mechanism used for + authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values + include: "ServiceAuthentication", "UserAuthentication". + :paramtype authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType + :keyword refresh_token: The refresh token obtained from Google for authorizing access to + AdWords for UserAuthentication. + :paramtype refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword client_id: The client id of the google application used to acquire the refresh token. + Type: string (or Expression with resultType string). 
+ :paramtype client_id: any + :keyword client_secret: The client secret of the google application used to acquire the refresh + token. + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword email: The service account email ID that is used for ServiceAuthentication and can + only be used on self-hosted IR. + :paramtype email: any + :keyword key_file_path: The full path to the .p12 key file that is used to authenticate the + service account email address and can only be used on self-hosted IR. + :paramtype key_file_path: any + :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates + for verifying the server when connecting over SSL. This property can only be set when using SSL + on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :paramtype trusted_cert_path: any + :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system + trust store or from a specified PEM file. The default value is false. + :paramtype use_system_trust_store: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'GoogleAdWords' # type: str self.client_customer_id = client_customer_id @@ -18918,30 +25318,30 @@ class GoogleAdWordsObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -18976,6 +25376,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). 
+ :paramtype table_name: any + """ super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'GoogleAdWordsObject' # type: str self.table_name = table_name @@ -18986,32 +25410,32 @@ class GoogleAdWordsSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -19043,6 +25467,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'GoogleAdWordsSource' # type: str self.query = query @@ -19053,58 +25503,58 @@ class GoogleBigQueryLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param project: Required. The default BigQuery project to query against. - :type project: any - :param additional_projects: A comma-separated list of public BigQuery projects to access. - :type additional_projects: any - :param request_google_drive_scope: Whether to request access to Google Drive. Allowing Google + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar project: Required. The default BigQuery project to query against. + :vartype project: any + :ivar additional_projects: A comma-separated list of public BigQuery projects to access. + :vartype additional_projects: any + :ivar request_google_drive_scope: Whether to request access to Google Drive. Allowing Google Drive access enables support for federated tables that combine BigQuery data with data from Google Drive. The default value is false. - :type request_google_drive_scope: any - :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for + :vartype request_google_drive_scope: any + :ivar authentication_type: Required. The OAuth 2.0 authentication mechanism used for authentication. 
ServiceAuthentication can only be used on self-hosted IR. Possible values include: "ServiceAuthentication", "UserAuthentication". - :type authentication_type: str or + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType - :param refresh_token: The refresh token obtained from Google for authorizing access to BigQuery + :ivar refresh_token: The refresh token obtained from Google for authorizing access to BigQuery for UserAuthentication. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param client_id: The client id of the google application used to acquire the refresh token. + :vartype refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar client_id: The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). - :type client_id: any - :param client_secret: The client secret of the google application used to acquire the refresh + :vartype client_id: any + :ivar client_secret: The client secret of the google application used to acquire the refresh token. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param email: The service account email ID that is used for ServiceAuthentication and can only + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar email: The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. - :type email: any - :param key_file_path: The full path to the .p12 key file that is used to authenticate the + :vartype email: any + :ivar key_file_path: The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. 
- :type key_file_path: any - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + :vartype key_file_path: any + :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: any - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + :vartype trusted_cert_path: any + :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_system_trust_store: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -19156,6 +25606,58 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. 
+ :paramtype annotations: list[any] + :keyword project: Required. The default BigQuery project to query against. + :paramtype project: any + :keyword additional_projects: A comma-separated list of public BigQuery projects to access. + :paramtype additional_projects: any + :keyword request_google_drive_scope: Whether to request access to Google Drive. Allowing Google + Drive access enables support for federated tables that combine BigQuery data with data from + Google Drive. The default value is false. + :paramtype request_google_drive_scope: any + :keyword authentication_type: Required. The OAuth 2.0 authentication mechanism used for + authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values + include: "ServiceAuthentication", "UserAuthentication". + :paramtype authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType + :keyword refresh_token: The refresh token obtained from Google for authorizing access to + BigQuery for UserAuthentication. + :paramtype refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword client_id: The client id of the google application used to acquire the refresh token. + Type: string (or Expression with resultType string). + :paramtype client_id: any + :keyword client_secret: The client secret of the google application used to acquire the refresh + token. + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword email: The service account email ID that is used for ServiceAuthentication and can + only be used on self-hosted IR. + :paramtype email: any + :keyword key_file_path: The full path to the .p12 key file that is used to authenticate the + service account email address and can only be used on self-hosted IR. + :paramtype key_file_path: any + :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates + for verifying the server when connecting over SSL. 
This property can only be set when using SSL + on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :paramtype trusted_cert_path: any + :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system + trust store or from a specified PEM file. The default value is false. + :paramtype use_system_trust_store: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'GoogleBigQuery' # type: str self.project = project @@ -19177,37 +25679,37 @@ class GoogleBigQueryObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using database + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using database + table properties instead. - :type table_name: any - :param table: The table name of the Google BigQuery. Type: string (or Expression with - resultType string). - :type table: any - :param dataset: The database name of the Google BigQuery. Type: string (or Expression with + :vartype table_name: any + :ivar table: The table name of the Google BigQuery. Type: string (or Expression with resultType + string). 
+ :vartype table: any + :ivar dataset: The database name of the Google BigQuery. Type: string (or Expression with resultType string). - :type dataset: any + :vartype dataset: any """ _validation = { @@ -19246,6 +25748,37 @@ def __init__( dataset: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using database + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of the Google BigQuery. Type: string (or Expression with + resultType string). + :paramtype table: any + :keyword dataset: The database name of the Google BigQuery. Type: string (or Expression with + resultType string). 
+ :paramtype dataset: any + """ super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'GoogleBigQueryObject' # type: str self.table_name = table_name @@ -19258,32 +25791,32 @@ class GoogleBigQuerySource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -19315,6 +25848,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'GoogleBigQuerySource' # type: str self.query = query @@ -19325,34 +25884,34 @@ class GoogleCloudStorageLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param access_key_id: The access key identifier of the Google Cloud Storage Identity and Access + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar access_key_id: The access key identifier of the Google Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: any - :param secret_access_key: The secret access key of the Google Cloud Storage Identity and Access + :vartype access_key_id: any + :ivar secret_access_key: The secret access key of the Google Cloud Storage Identity and Access Management (IAM) user. - :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_url: This value specifies the endpoint to access with the Google Cloud Storage + :vartype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_url: This value specifies the endpoint to access with the Google Cloud Storage Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). 
- :type service_url: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_url: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -19386,6 +25945,34 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword access_key_id: The access key identifier of the Google Cloud Storage Identity and + Access Management (IAM) user. Type: string (or Expression with resultType string). + :paramtype access_key_id: any + :keyword secret_access_key: The secret access key of the Google Cloud Storage Identity and + Access Management (IAM) user. + :paramtype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_url: This value specifies the endpoint to access with the Google Cloud Storage + Connector. This is an optional property; change it only if you want to try a different service + endpoint or want to switch between https and http. Type: string (or Expression with resultType + string). 
+ :paramtype service_url: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(GoogleCloudStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'GoogleCloudStorage' # type: str self.access_key_id = access_key_id @@ -19399,23 +25986,23 @@ class GoogleCloudStorageLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any - :param bucket_name: Specify the bucketName of Google Cloud Storage. Type: string (or Expression + :vartype file_name: any + :ivar bucket_name: Specify the bucketName of Google Cloud Storage. Type: string (or Expression with resultType string). 
- :type bucket_name: any - :param version: Specify the version of Google Cloud Storage. Type: string (or Expression with + :vartype bucket_name: any + :ivar version: Specify the version of Google Cloud Storage. Type: string (or Expression with resultType string). - :type version: any + :vartype version: any """ _validation = { @@ -19441,6 +26028,23 @@ def __init__( version: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + :keyword bucket_name: Specify the bucketName of Google Cloud Storage. Type: string (or + Expression with resultType string). + :paramtype bucket_name: any + :keyword version: Specify the version of Google Cloud Storage. Type: string (or Expression with + resultType string). + :paramtype version: any + """ super(GoogleCloudStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.type = 'GoogleCloudStorageLocation' # type: str self.bucket_name = bucket_name @@ -19452,47 +26056,47 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. 
- :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: Google Cloud Storage wildcardFolderPath. Type: string (or + :vartype recursive: any + :ivar wildcard_folder_path: Google Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: Google Cloud Storage wildcardFileName. Type: string (or Expression + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: Google Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or + :vartype wildcard_file_name: any + :ivar prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). 
- :type prefix: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype prefix: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype file_list_path: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :type modified_datetime_end: any + :vartype modified_datetime_end: any """ _validation = { @@ -19534,6 +26138,47 @@ def __init__( modified_datetime_end: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: Google Cloud Storage wildcardFolderPath. Type: string (or + Expression with resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: Google Cloud Storage wildcardFileName. Type: string (or Expression + with resultType string). + :paramtype wildcard_file_name: any + :keyword prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or + Expression with resultType string). + :paramtype prefix: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). 
+ :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + """ super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'GoogleCloudStorageReadSettings' # type: str self.recursive = recursive @@ -19553,28 +26198,28 @@ class GreenplumLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: An ODBC connection string. 
Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar pwd: The Azure key vault secret reference of password in connection string. + :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -19606,6 +26251,28 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword pwd: The Azure key vault secret reference of password in connection string. + :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Greenplum' # type: str self.connection_string = connection_string @@ -19618,32 +26285,32 @@ class GreenplumSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. 
+ :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. 
Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -19675,6 +26342,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). 
+ :paramtype query: any + """ super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'GreenplumSource' # type: str self.query = query @@ -19685,36 +26378,36 @@ class GreenplumTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The table name of Greenplum. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The schema name of Greenplum. Type: string (or Expression + :vartype table_name: any + :ivar table: The table name of Greenplum. Type: string (or Expression with resultType string). + :vartype table: any + :ivar schema_type_properties_schema: The schema name of Greenplum. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -19753,6 +26446,37 @@ def __init__( schema_type_properties_schema: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of Greenplum. Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of Greenplum. Type: string (or + Expression with resultType string). 
+ :paramtype schema_type_properties_schema: any + """ super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'GreenplumTable' # type: str self.table_name = table_name @@ -19765,51 +26489,51 @@ class HBaseLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The IP address or host name of the HBase server. (i.e. 192.168.222.160). - :type host: any - :param port: The TCP port that the HBase instance uses to listen for client connections. The + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The IP address or host name of the HBase server. (i.e. 192.168.222.160). + :vartype host: any + :ivar port: The TCP port that the HBase instance uses to listen for client connections. The default value is 9090. - :type port: any - :param http_path: The partial URL corresponding to the HBase server. (i.e. + :vartype port: any + :ivar http_path: The partial URL corresponding to the HBase server. (i.e. /gateway/sandbox/hbase/version). - :type http_path: any - :param authentication_type: Required. The authentication mechanism to use to connect to the + :vartype http_path: any + :ivar authentication_type: Required. The authentication mechanism to use to connect to the HBase server. Possible values include: "Anonymous", "Basic". - :type authentication_type: str or ~azure.mgmt.datafactory.models.HBaseAuthenticationType - :param username: The user name used to connect to the HBase instance. - :type username: any - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.HBaseAuthenticationType + :ivar username: The user name used to connect to the HBase instance. + :vartype username: any + :ivar password: The password corresponding to the user name. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. 
- :type enable_ssl: any - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + :vartype enable_ssl: any + :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: any - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + :vartype trusted_cert_path: any + :ivar allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: any - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + :vartype allow_host_name_cn_mismatch: any + :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype allow_self_signed_server_cert: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -19859,6 +26583,52 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The IP address or host name of the HBase server. (i.e. + 192.168.222.160). + :paramtype host: any + :keyword port: The TCP port that the HBase instance uses to listen for client connections. The + default value is 9090. + :paramtype port: any + :keyword http_path: The partial URL corresponding to the HBase server. (i.e. + /gateway/sandbox/hbase/version). + :paramtype http_path: any + :keyword authentication_type: Required. The authentication mechanism to use to connect to the + HBase server. Possible values include: "Anonymous", "Basic". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.HBaseAuthenticationType + :keyword username: The user name used to connect to the HBase instance. + :paramtype username: any + :keyword password: The password corresponding to the user name. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. + The default value is false. + :paramtype enable_ssl: any + :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates + for verifying the server when connecting over SSL. This property can only be set when using SSL + on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :paramtype trusted_cert_path: any + :keyword allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. 
The default value is false. + :paramtype allow_host_name_cn_mismatch: any + :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates + from the server. The default value is false. + :paramtype allow_self_signed_server_cert: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(HBaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'HBase' # type: str self.host = host @@ -19879,30 +26649,30 @@ class HBaseObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -19937,6 +26707,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. 
+ :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'HBaseObject' # type: str self.table_name = table_name @@ -19947,32 +26741,32 @@ class HBaseSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. 
+ :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. 
Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -20004,6 +26798,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). 
+ :paramtype query: any + """ super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'HBaseSource' # type: str self.query = query @@ -20014,34 +26834,34 @@ class HdfsLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param url: Required. The URL of the HDFS service endpoint, e.g. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar url: Required. The URL of the HDFS service endpoint, e.g. http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). - :type url: any - :param authentication_type: Type of authentication used to connect to the HDFS. Possible values + :vartype url: any + :ivar authentication_type: Type of authentication used to connect to the HDFS. Possible values are: Anonymous and Windows. Type: string (or Expression with resultType string). - :type authentication_type: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype authentication_type: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param user_name: User name for Windows authentication. Type: string (or Expression with + :vartype encrypted_credential: any + :ivar user_name: User name for Windows authentication. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password for Windows authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase + :vartype user_name: any + :ivar password: Password for Windows authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -20078,6 +26898,34 @@ def __init__( password: Optional["SecretBase"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword url: Required. The URL of the HDFS service endpoint, e.g. + http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). + :paramtype url: any + :keyword authentication_type: Type of authentication used to connect to the HDFS. Possible + values are: Anonymous and Windows. Type: string (or Expression with resultType string). + :paramtype authentication_type: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword user_name: User name for Windows authentication. Type: string (or Expression with + resultType string). + :paramtype user_name: any + :keyword password: Password for Windows authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + """ super(HdfsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Hdfs' # type: str self.url = url @@ -20092,17 +26940,17 @@ class HdfsLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any + :vartype file_name: any """ _validation = { @@ -20124,6 +26972,17 @@ def __init__( file_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + """ super(HdfsLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.type = 'HdfsLocation' # type: str @@ -20133,46 +26992,46 @@ class HdfsReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. 
The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or Expression with + :vartype recursive: any + :ivar wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype wildcard_file_name: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. 
Type: string (or Expression with resultType string). - :type file_list_path: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype file_list_path: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype partition_root_path: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any - :param distcp_settings: Specifies Distcp-related settings. - :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype modified_datetime_end: any + :ivar distcp_settings: Specifies Distcp-related settings. + :vartype distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type delete_files_after_completion: any + :vartype delete_files_after_completion: any """ _validation = { @@ -20214,6 +27073,46 @@ def __init__( delete_files_after_completion: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or Expression with + resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType + string). + :paramtype wildcard_file_name: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). 
+ :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + :keyword distcp_settings: Specifies Distcp-related settings. + :paramtype distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + """ super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HdfsReadSettings' # type: str self.recursive = recursive @@ -20233,28 +27132,28 @@ class HdfsSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param distcp_settings: Specifies Distcp-related settings. - :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + :vartype recursive: any + :ivar distcp_settings: Specifies Distcp-related settings. + :vartype distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings """ _validation = { @@ -20284,6 +27183,28 @@ def __init__( distcp_settings: Optional["DistcpSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword distcp_settings: Specifies Distcp-related settings. + :paramtype distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HdfsSource' # type: str self.recursive = recursive @@ -20295,40 +27216,40 @@ class HDInsightHiveActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. 
- :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[any] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with resultType string). - :type script_path: any - :param script_linked_service: Script linked service reference. - :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param defines: Allows user to specify defines for Hive job request. - :type defines: dict[str, any] - :param variables: User specified arguments under hivevar namespace. - :type variables: list[any] - :param query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster - is with ESP (Enterprise Security Package). - :type query_timeout: int + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. 
+ :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar storage_linked_services: Storage linked service references. + :vartype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :ivar arguments: User specified arguments to HDInsightActivity. + :vartype arguments: list[any] + :ivar get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :vartype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :ivar script_path: Script path. Type: string (or Expression with resultType string). + :vartype script_path: any + :ivar script_linked_service: Script linked service reference. + :vartype script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar defines: Allows user to specify defines for Hive job request. + :vartype defines: dict[str, any] + :ivar variables: User specified arguments under hivevar namespace. + :vartype variables: list[any] + :ivar query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster is + with ESP (Enterprise Security Package). + :vartype query_timeout: int """ _validation = { @@ -20375,6 +27296,42 @@ def __init__( query_timeout: Optional[int] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. 
+ :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword storage_linked_services: Storage linked service references. + :paramtype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :keyword arguments: User specified arguments to HDInsightActivity. + :paramtype arguments: list[any] + :keyword get_debug_info: Debug info option. Possible values include: "None", "Always", + "Failure". + :paramtype get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :keyword script_path: Script path. Type: string (or Expression with resultType string). + :paramtype script_path: any + :keyword script_linked_service: Script linked service reference. + :paramtype script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword defines: Allows user to specify defines for Hive job request. + :paramtype defines: dict[str, any] + :keyword variables: User specified arguments under hivevar namespace. + :paramtype variables: list[any] + :keyword query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster + is with ESP (Enterprise Security Package). + :paramtype query_timeout: int + """ super(HDInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'HDInsightHive' # type: str self.storage_linked_services = storage_linked_services @@ -20392,42 +27349,42 @@ class HDInsightLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with - resultType string). - :type cluster_uri: any - :param user_name: HDInsight cluster user name. Type: string (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with resultType string). - :type user_name: any - :param password: HDInsight cluster password. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param linked_service_name: The Azure Storage linked service reference. 
- :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param hcatalog_linked_service_name: A reference to the Azure SQL linked service that points to + :vartype cluster_uri: any + :ivar user_name: HDInsight cluster user name. Type: string (or Expression with resultType + string). + :vartype user_name: any + :ivar password: HDInsight cluster password. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar linked_service_name: The Azure Storage linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar hcatalog_linked_service_name: A reference to the Azure SQL linked service that points to the HCatalog database. - :type hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security + :vartype encrypted_credential: any + :ivar is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. - :type is_esp_enabled: any - :param file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. + :vartype is_esp_enabled: any + :ivar file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. Type: string (or Expression with resultType string). 
- :type file_system: any + :vartype file_system: any """ _validation = { @@ -20470,6 +27427,42 @@ def __init__( file_system: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with + resultType string). + :paramtype cluster_uri: any + :keyword user_name: HDInsight cluster user name. Type: string (or Expression with resultType + string). + :paramtype user_name: any + :keyword password: HDInsight cluster password. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword linked_service_name: The Azure Storage linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword hcatalog_linked_service_name: A reference to the Azure SQL linked service that points + to the HCatalog database. + :paramtype hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security + Package). Type: Boolean. 
+ :paramtype is_esp_enabled: any + :keyword file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS + Gen2. Type: string (or Expression with resultType string). + :paramtype file_system: any + """ super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'HDInsight' # type: str self.cluster_uri = cluster_uri @@ -20487,39 +27480,39 @@ class HDInsightMapReduceActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[any] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param class_name: Required. Class name. 
Type: string (or Expression with resultType string). - :type class_name: any - :param jar_file_path: Required. Jar path. Type: string (or Expression with resultType string). - :type jar_file_path: any - :param jar_linked_service: Jar linked service reference. - :type jar_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param jar_libs: Jar libs. - :type jar_libs: list[any] - :param defines: Allows user to specify defines for the MapReduce job request. - :type defines: dict[str, any] + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar storage_linked_services: Storage linked service references. + :vartype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :ivar arguments: User specified arguments to HDInsightActivity. + :vartype arguments: list[any] + :ivar get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :vartype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :ivar class_name: Required. Class name. Type: string (or Expression with resultType string). + :vartype class_name: any + :ivar jar_file_path: Required. 
Jar path. Type: string (or Expression with resultType string). + :vartype jar_file_path: any + :ivar jar_linked_service: Jar linked service reference. + :vartype jar_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar jar_libs: Jar libs. + :vartype jar_libs: list[any] + :ivar defines: Allows user to specify defines for the MapReduce job request. + :vartype defines: dict[str, any] """ _validation = { @@ -20568,6 +27561,42 @@ def __init__( defines: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword storage_linked_services: Storage linked service references. + :paramtype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :keyword arguments: User specified arguments to HDInsightActivity. + :paramtype arguments: list[any] + :keyword get_debug_info: Debug info option. Possible values include: "None", "Always", + "Failure". + :paramtype get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :keyword class_name: Required. Class name. Type: string (or Expression with resultType string). + :paramtype class_name: any + :keyword jar_file_path: Required. Jar path. 
Type: string (or Expression with resultType + string). + :paramtype jar_file_path: any + :keyword jar_linked_service: Jar linked service reference. + :paramtype jar_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword jar_libs: Jar libs. + :paramtype jar_libs: list[any] + :keyword defines: Allows user to specify defines for the MapReduce job request. + :paramtype defines: dict[str, any] + """ super(HDInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'HDInsightMapReduce' # type: str self.storage_linked_services = storage_linked_services @@ -20585,120 +27614,119 @@ class HDInsightOnDemandLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. 
+ :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. Type: string (or Expression with resultType string). - :type cluster_size: any - :param time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. + :vartype cluster_size: any + :ivar time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string (or Expression with resultType string). - :type time_to_live: any - :param version: Required. Version of the HDInsight cluster.  Type: string (or Expression with + :vartype time_to_live: any + :ivar version: Required. Version of the HDInsight cluster.  Type: string (or Expression with resultType string). - :type version: any - :param linked_service_name: Required. Azure Storage linked service to be used by the on-demand + :vartype version: any + :ivar linked_service_name: Required. Azure Storage linked service to be used by the on-demand cluster for storing and processing data. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param host_subscription_id: Required. The customer’s subscription to host the cluster. Type: + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar host_subscription_id: Required. 
The customer’s subscription to host the cluster. Type: string (or Expression with resultType string). - :type host_subscription_id: any - :param service_principal_id: The service principal id for the hostSubscriptionId. Type: string + :vartype host_subscription_id: any + :ivar service_principal_id: The service principal id for the hostSubscriptionId. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The key for the service principal id. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. The Tenant id/name to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: any - :param cluster_resource_group: Required. The resource group where the cluster belongs. Type: + :vartype service_principal_id: any + :ivar service_principal_key: The key for the service principal id. + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: Required. The Tenant id/name to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: any + :ivar cluster_resource_group: Required. The resource group where the cluster belongs. Type: string (or Expression with resultType string). - :type cluster_resource_group: any - :param cluster_name_prefix: The prefix of cluster name, postfix will be distinct with - timestamp. Type: string (or Expression with resultType string). - :type cluster_name_prefix: any - :param cluster_user_name: The username to access the cluster. Type: string (or Expression with + :vartype cluster_resource_group: any + :ivar cluster_name_prefix: The prefix of cluster name, postfix will be distinct with timestamp. + Type: string (or Expression with resultType string). + :vartype cluster_name_prefix: any + :ivar cluster_user_name: The username to access the cluster. Type: string (or Expression with resultType string). 
- :type cluster_user_name: any - :param cluster_password: The password to access the cluster. - :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase - :param cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for + :vartype cluster_user_name: any + :ivar cluster_password: The password to access the cluster. + :vartype cluster_password: ~azure.mgmt.datafactory.models.SecretBase + :ivar cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for Linux). Type: string (or Expression with resultType string). - :type cluster_ssh_user_name: any - :param cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). - :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase - :param additional_linked_service_names: Specifies additional storage accounts for the HDInsight + :vartype cluster_ssh_user_name: any + :ivar cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). + :vartype cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase + :ivar additional_linked_service_names: Specifies additional storage accounts for the HDInsight linked service so that the Data Factory service can register them on your behalf. - :type additional_linked_service_names: + :vartype additional_linked_service_names: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param hcatalog_linked_service_name: The name of Azure SQL linked service that point to the + :ivar hcatalog_linked_service_name: The name of Azure SQL linked service that point to the HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database as the metastore. - :type hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param cluster_type: The cluster type. Type: string (or Expression with resultType string). - :type cluster_type: any - :param spark_version: The version of spark if the cluster type is 'spark'. 
Type: string (or + :vartype hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar cluster_type: The cluster type. Type: string (or Expression with resultType string). + :vartype cluster_type: any + :ivar spark_version: The version of spark if the cluster type is 'spark'. Type: string (or Expression with resultType string). - :type spark_version: any - :param core_configuration: Specifies the core configuration parameters (as in core-site.xml) - for the HDInsight cluster to be created. - :type core_configuration: any - :param h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) for + :vartype spark_version: any + :ivar core_configuration: Specifies the core configuration parameters (as in core-site.xml) for + the HDInsight cluster to be created. + :vartype core_configuration: any + :ivar h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) for the HDInsight cluster. - :type h_base_configuration: any - :param hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for the + :vartype h_base_configuration: any + :ivar hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for the HDInsight cluster. - :type hdfs_configuration: any - :param hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for the + :vartype hdfs_configuration: any + :ivar hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for the HDInsight cluster. - :type hive_configuration: any - :param map_reduce_configuration: Specifies the MapReduce configuration parameters + :vartype hive_configuration: any + :ivar map_reduce_configuration: Specifies the MapReduce configuration parameters (mapred-site.xml) for the HDInsight cluster. 
- :type map_reduce_configuration: any - :param oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for + :vartype map_reduce_configuration: any + :ivar oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for the HDInsight cluster. - :type oozie_configuration: any - :param storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for + :vartype oozie_configuration: any + :ivar storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for the HDInsight cluster. - :type storm_configuration: any - :param yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for the + :vartype storm_configuration: any + :ivar yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for the HDInsight cluster. - :type yarn_configuration: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype yarn_configuration: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param head_node_size: Specifies the size of the head node for the HDInsight cluster. - :type head_node_size: any - :param data_node_size: Specifies the size of the data node for the HDInsight cluster. - :type data_node_size: any - :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for the HDInsight - cluster. - :type zookeeper_node_size: any - :param script_actions: Custom script actions to run on HDI ondemand cluster once it's up. - Please refer to + :vartype encrypted_credential: any + :ivar head_node_size: Specifies the size of the head node for the HDInsight cluster. + :vartype head_node_size: any + :ivar data_node_size: Specifies the size of the data node for the HDInsight cluster. 
+ :vartype data_node_size: any + :ivar zookeeper_node_size: Specifies the size of the Zoo Keeper node for the HDInsight cluster. + :vartype zookeeper_node_size: any + :ivar script_actions: Custom script actions to run on HDI ondemand cluster once it's up. Please + refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. - :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] - :param virtual_network_id: The ARM resource ID for the vNet to which the cluster should be + :vartype script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] + :ivar virtual_network_id: The ARM resource ID for the vNet to which the cluster should be joined after creation. Type: string (or Expression with resultType string). - :type virtual_network_id: any - :param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was + :vartype virtual_network_id: any + :ivar subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is required. Type: string (or Expression with resultType string). - :type subnet_name: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype subnet_name: any + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -20799,6 +27827,120 @@ def __init__( credential: Optional["CredentialReference"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: + 4. Type: string (or Expression with resultType string). + :paramtype cluster_size: any + :keyword time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. + Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity + run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string + (or Expression with resultType string). + :paramtype time_to_live: any + :keyword version: Required. Version of the HDInsight cluster.  Type: string (or Expression with + resultType string). + :paramtype version: any + :keyword linked_service_name: Required. Azure Storage linked service to be used by the + on-demand cluster for storing and processing data. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword host_subscription_id: Required. The customer’s subscription to host the cluster. Type: + string (or Expression with resultType string). + :paramtype host_subscription_id: any + :keyword service_principal_id: The service principal id for the hostSubscriptionId. Type: + string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The key for the service principal id. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: Required. The Tenant id/name to which the service principal belongs. 
Type: + string (or Expression with resultType string). + :paramtype tenant: any + :keyword cluster_resource_group: Required. The resource group where the cluster belongs. Type: + string (or Expression with resultType string). + :paramtype cluster_resource_group: any + :keyword cluster_name_prefix: The prefix of cluster name, postfix will be distinct with + timestamp. Type: string (or Expression with resultType string). + :paramtype cluster_name_prefix: any + :keyword cluster_user_name: The username to access the cluster. Type: string (or Expression + with resultType string). + :paramtype cluster_user_name: any + :keyword cluster_password: The password to access the cluster. + :paramtype cluster_password: ~azure.mgmt.datafactory.models.SecretBase + :keyword cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for + Linux). Type: string (or Expression with resultType string). + :paramtype cluster_ssh_user_name: any + :keyword cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). + :paramtype cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase + :keyword additional_linked_service_names: Specifies additional storage accounts for the + HDInsight linked service so that the Data Factory service can register them on your behalf. + :paramtype additional_linked_service_names: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :keyword hcatalog_linked_service_name: The name of Azure SQL linked service that point to the + HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database + as the metastore. + :paramtype hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword cluster_type: The cluster type. Type: string (or Expression with resultType string). + :paramtype cluster_type: any + :keyword spark_version: The version of spark if the cluster type is 'spark'. Type: string (or + Expression with resultType string). 
+ :paramtype spark_version: any + :keyword core_configuration: Specifies the core configuration parameters (as in core-site.xml) + for the HDInsight cluster to be created. + :paramtype core_configuration: any + :keyword h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) + for the HDInsight cluster. + :paramtype h_base_configuration: any + :keyword hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for + the HDInsight cluster. + :paramtype hdfs_configuration: any + :keyword hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for + the HDInsight cluster. + :paramtype hive_configuration: any + :keyword map_reduce_configuration: Specifies the MapReduce configuration parameters + (mapred-site.xml) for the HDInsight cluster. + :paramtype map_reduce_configuration: any + :keyword oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for + the HDInsight cluster. + :paramtype oozie_configuration: any + :keyword storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for + the HDInsight cluster. + :paramtype storm_configuration: any + :keyword yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for + the HDInsight cluster. + :paramtype yarn_configuration: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword head_node_size: Specifies the size of the head node for the HDInsight cluster. + :paramtype head_node_size: any + :keyword data_node_size: Specifies the size of the data node for the HDInsight cluster. + :paramtype data_node_size: any + :keyword zookeeper_node_size: Specifies the size of the Zoo Keeper node for the HDInsight + cluster. 
+ :paramtype zookeeper_node_size: any + :keyword script_actions: Custom script actions to run on HDI ondemand cluster once it's up. + Please refer to + https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. + :paramtype script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] + :keyword virtual_network_id: The ARM resource ID for the vNet to which the cluster should be + joined after creation. Type: string (or Expression with resultType string). + :paramtype virtual_network_id: any + :keyword subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was + specified, then this property is required. Type: string (or Expression with resultType string). + :paramtype subnet_name: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'HDInsightOnDemand' # type: str self.cluster_size = cluster_size @@ -20842,36 +27984,36 @@ class HDInsightPigActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. 
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. Type: array (or Expression - with resultType array). - :type arguments: any - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with resultType string). - :type script_path: any - :param script_linked_service: Script linked service reference. - :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param defines: Allows user to specify defines for Pig job request. - :type defines: dict[str, any] + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. 
+ :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar storage_linked_services: Storage linked service references. + :vartype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :ivar arguments: User specified arguments to HDInsightActivity. Type: array (or Expression with + resultType array). + :vartype arguments: any + :ivar get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :vartype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :ivar script_path: Script path. Type: string (or Expression with resultType string). + :vartype script_path: any + :ivar script_linked_service: Script linked service reference. + :vartype script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar defines: Allows user to specify defines for Pig job request. + :vartype defines: dict[str, any] """ _validation = { @@ -20914,6 +28056,38 @@ def __init__( defines: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. 
+ :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword storage_linked_services: Storage linked service references. + :paramtype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :keyword arguments: User specified arguments to HDInsightActivity. Type: array (or Expression + with resultType array). + :paramtype arguments: any + :keyword get_debug_info: Debug info option. Possible values include: "None", "Always", + "Failure". + :paramtype get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :keyword script_path: Script path. Type: string (or Expression with resultType string). + :paramtype script_path: any + :keyword script_linked_service: Script linked service reference. + :paramtype script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword defines: Allows user to specify defines for Pig job request. + :paramtype defines: dict[str, any] + """ super(HDInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'HDInsightPig' # type: str self.storage_linked_services = storage_linked_services @@ -20929,43 +28103,43 @@ class HDInsightSparkActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. 
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. Type: string (or Expression with resultType string). - :type root_path: any - :param entry_file_path: Required. The relative path to the root folder of the code/package to - be executed. Type: string (or Expression with resultType string). - :type entry_file_path: any - :param arguments: The user-specified arguments to HDInsightSparkActivity. - :type arguments: list[any] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". 
- :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param spark_job_linked_service: The storage linked service for uploading the entry file and + :vartype root_path: any + :ivar entry_file_path: Required. The relative path to the root folder of the code/package to be + executed. Type: string (or Expression with resultType string). + :vartype entry_file_path: any + :ivar arguments: The user-specified arguments to HDInsightSparkActivity. + :vartype arguments: list[any] + :ivar get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :vartype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :ivar spark_job_linked_service: The storage linked service for uploading the entry file and dependencies, and for receiving logs. - :type spark_job_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param class_name: The application's Java/Spark main class. - :type class_name: str - :param proxy_user: The user to impersonate that will execute the job. Type: string (or + :vartype spark_job_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar class_name: The application's Java/Spark main class. + :vartype class_name: str + :ivar proxy_user: The user to impersonate that will execute the job. Type: string (or Expression with resultType string). - :type proxy_user: any - :param spark_config: Spark configuration property. - :type spark_config: dict[str, any] + :vartype proxy_user: any + :ivar spark_config: Spark configuration property. + :vartype spark_config: dict[str, any] """ _validation = { @@ -21014,6 +28188,45 @@ def __init__( spark_config: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. 
+ :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. + Type: string (or Expression with resultType string). + :paramtype root_path: any + :keyword entry_file_path: Required. The relative path to the root folder of the code/package to + be executed. Type: string (or Expression with resultType string). + :paramtype entry_file_path: any + :keyword arguments: The user-specified arguments to HDInsightSparkActivity. + :paramtype arguments: list[any] + :keyword get_debug_info: Debug info option. Possible values include: "None", "Always", + "Failure". + :paramtype get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :keyword spark_job_linked_service: The storage linked service for uploading the entry file and + dependencies, and for receiving logs. + :paramtype spark_job_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword class_name: The application's Java/Spark main class. + :paramtype class_name: str + :keyword proxy_user: The user to impersonate that will execute the job. Type: string (or + Expression with resultType string). + :paramtype proxy_user: any + :keyword spark_config: Spark configuration property. 
+ :paramtype spark_config: dict[str, any] + """ super(HDInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'HDInsightSpark' # type: str self.root_path = root_path @@ -21031,49 +28244,49 @@ class HDInsightStreamingActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[any] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param mapper: Required. Mapper executable name. 
Type: string (or Expression with resultType + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar storage_linked_services: Storage linked service references. + :vartype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :ivar arguments: User specified arguments to HDInsightActivity. + :vartype arguments: list[any] + :ivar get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :vartype get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :ivar mapper: Required. Mapper executable name. Type: string (or Expression with resultType string). - :type mapper: any - :param reducer: Required. Reducer executable name. Type: string (or Expression with resultType + :vartype mapper: any + :ivar reducer: Required. Reducer executable name. Type: string (or Expression with resultType string). - :type reducer: any - :param input: Required. Input blob path. Type: string (or Expression with resultType string). - :type input: any - :param output: Required. Output blob path. Type: string (or Expression with resultType string). 
- :type output: any - :param file_paths: Required. Paths to streaming job files. Can be directories. - :type file_paths: list[any] - :param file_linked_service: Linked service reference where the files are located. - :type file_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param combiner: Combiner executable name. Type: string (or Expression with resultType string). - :type combiner: any - :param command_environment: Command line environment values. - :type command_environment: list[any] - :param defines: Allows user to specify defines for streaming job request. - :type defines: dict[str, any] + :vartype reducer: any + :ivar input: Required. Input blob path. Type: string (or Expression with resultType string). + :vartype input: any + :ivar output: Required. Output blob path. Type: string (or Expression with resultType string). + :vartype output: any + :ivar file_paths: Required. Paths to streaming job files. Can be directories. + :vartype file_paths: list[any] + :ivar file_linked_service: Linked service reference where the files are located. + :vartype file_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar combiner: Combiner executable name. Type: string (or Expression with resultType string). + :vartype combiner: any + :ivar command_environment: Command line environment values. + :vartype command_environment: list[any] + :ivar defines: Allows user to specify defines for streaming job request. + :vartype defines: dict[str, any] """ _validation = { @@ -21133,6 +28346,53 @@ def __init__( defines: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. 
+ :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword storage_linked_services: Storage linked service references. + :paramtype storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :keyword arguments: User specified arguments to HDInsightActivity. + :paramtype arguments: list[any] + :keyword get_debug_info: Debug info option. Possible values include: "None", "Always", + "Failure". + :paramtype get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :keyword mapper: Required. Mapper executable name. Type: string (or Expression with resultType + string). + :paramtype mapper: any + :keyword reducer: Required. Reducer executable name. Type: string (or Expression with + resultType string). + :paramtype reducer: any + :keyword input: Required. Input blob path. Type: string (or Expression with resultType string). + :paramtype input: any + :keyword output: Required. Output blob path. Type: string (or Expression with resultType + string). + :paramtype output: any + :keyword file_paths: Required. Paths to streaming job files. Can be directories. + :paramtype file_paths: list[any] + :keyword file_linked_service: Linked service reference where the files are located. + :paramtype file_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword combiner: Combiner executable name. Type: string (or Expression with resultType + string). + :paramtype combiner: any + :keyword command_environment: Command line environment values. 
+ :paramtype command_environment: list[any] + :keyword defines: Allows user to specify defines for streaming job request. + :paramtype defines: dict[str, any] + """ super(HDInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'HDInsightStreaming' # type: str self.storage_linked_services = storage_linked_services @@ -21154,70 +28414,70 @@ class HiveLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. IP address or host name of the Hive server, separated by ';' for - multiple hosts (only when serviceDiscoveryMode is enable). - :type host: any - :param port: The TCP port that the Hive server uses to listen for client connections. - :type port: any - :param server_type: The type of Hive server. Possible values include: "HiveServer1", + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. 
+ :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. IP address or host name of the Hive server, separated by ';' for multiple + hosts (only when serviceDiscoveryMode is enable). + :vartype host: any + :ivar port: The TCP port that the Hive server uses to listen for client connections. + :vartype port: any + :ivar server_type: The type of Hive server. Possible values include: "HiveServer1", "HiveServer2", "HiveThriftServer". - :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType - :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible + :vartype server_type: str or ~azure.mgmt.datafactory.models.HiveServerType + :ivar thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible values include: "Binary", "SASL", "HTTP ". - :type thrift_transport_protocol: str or + :vartype thrift_transport_protocol: str or ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol - :param authentication_type: Required. The authentication method used to access the Hive server. + :ivar authentication_type: Required. The authentication method used to access the Hive server. Possible values include: "Anonymous", "Username", "UsernameAndPassword", "WindowsAzureHDInsightService". - :type authentication_type: str or ~azure.mgmt.datafactory.models.HiveAuthenticationType - :param service_discovery_mode: true to indicate using the ZooKeeper service, false not. 
- :type service_discovery_mode: any - :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.HiveAuthenticationType + :ivar service_discovery_mode: true to indicate using the ZooKeeper service, false not. + :vartype service_discovery_mode: any + :ivar zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are added. - :type zoo_keeper_name_space: any - :param use_native_query: Specifies whether the driver uses native HiveQL queries,or converts + :vartype zoo_keeper_name_space: any + :ivar use_native_query: Specifies whether the driver uses native HiveQL queries, or converts them into an equivalent form in HiveQL. - :type use_native_query: any - :param username: The user name that you use to access Hive Server. - :type username: any - :param password: The password corresponding to the user name that you provided in the Username + :vartype use_native_query: any + :ivar username: The user name that you use to access Hive Server. + :vartype username: any + :ivar password: The password corresponding to the user name that you provided in the Username field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param http_path: The partial URL corresponding to the Hive server. - :type http_path: any - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar http_path: The partial URL corresponding to the Hive server. + :vartype http_path: any + :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false.
- :type enable_ssl: any - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + :vartype enable_ssl: any + :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: any - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + :vartype trusted_cert_path: any + :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: any - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + :vartype use_system_trust_store: any + :ivar allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: any - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + :vartype allow_host_name_cn_mismatch: any + :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype allow_self_signed_server_cert: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -21279,6 +28539,70 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. IP address or host name of the Hive server, separated by ';' for + multiple hosts (only when serviceDiscoveryMode is enable). + :paramtype host: any + :keyword port: The TCP port that the Hive server uses to listen for client connections. + :paramtype port: any + :keyword server_type: The type of Hive server. Possible values include: "HiveServer1", + "HiveServer2", "HiveThriftServer". + :paramtype server_type: str or ~azure.mgmt.datafactory.models.HiveServerType + :keyword thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible + values include: "Binary", "SASL", "HTTP ". + :paramtype thrift_transport_protocol: str or + ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol + :keyword authentication_type: Required. The authentication method used to access the Hive + server. Possible values include: "Anonymous", "Username", "UsernameAndPassword", + "WindowsAzureHDInsightService". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.HiveAuthenticationType + :keyword service_discovery_mode: true to indicate using the ZooKeeper service, false not. 
+ :paramtype service_discovery_mode: any + :keyword zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are + added. + :paramtype zoo_keeper_name_space: any + :keyword use_native_query: Specifies whether the driver uses native HiveQL queries, or converts + them into an equivalent form in HiveQL. + :paramtype use_native_query: any + :keyword username: The user name that you use to access Hive Server. + :paramtype username: any + :keyword password: The password corresponding to the user name that you provided in the + Username field. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword http_path: The partial URL corresponding to the Hive server. + :paramtype http_path: any + :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. + The default value is false. + :paramtype enable_ssl: any + :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates + for verifying the server when connecting over SSL. This property can only be set when using SSL + on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :paramtype trusted_cert_path: any + :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system + trust store or from a specified PEM file. The default value is false. + :paramtype use_system_trust_store: any + :keyword allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :paramtype allow_host_name_cn_mismatch: any + :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates + from the server. The default value is false. + :paramtype allow_self_signed_server_cert: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager.
Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(HiveLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Hive' # type: str self.host = host @@ -21305,36 +28629,36 @@ class HiveObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The table name of the Hive. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The schema name of the Hive. Type: string (or Expression + :vartype table_name: any + :ivar table: The table name of the Hive. Type: string (or Expression with resultType string). + :vartype table: any + :ivar schema_type_properties_schema: The schema name of the Hive. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -21373,6 +28697,37 @@ def __init__( schema_type_properties_schema: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of the Hive. Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of the Hive. Type: string (or + Expression with resultType string). + :paramtype schema_type_properties_schema: any + """ super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'HiveObject' # type: str self.table_name = table_name @@ -21385,32 +28740,32 @@ class HiveSource(TabularSource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -21442,6 +28797,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'HiveSource' # type: str self.query = query @@ -21452,47 +28833,47 @@ class HttpDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param relative_url: The relative URL based on the URL in the HttpLinkedService refers to an + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar relative_url: The relative URL based on the URL in the HttpLinkedService refers to an HTTP file Type: string (or Expression with resultType string). - :type relative_url: any - :param request_method: The HTTP method for the HTTP request. Type: string (or Expression with + :vartype relative_url: any + :ivar request_method: The HTTP method for the HTTP request. Type: string (or Expression with resultType string). - :type request_method: any - :param request_body: The body for the HTTP request. Type: string (or Expression with resultType + :vartype request_method: any + :ivar request_body: The body for the HTTP request. Type: string (or Expression with resultType string). 
- :type request_body: any - :param additional_headers: The headers for the HTTP Request. e.g. + :vartype request_body: any + :ivar additional_headers: The headers for the HTTP Request. e.g. request-header-name-1:request-header-value-1 ... request-header-name-n:request-header-value-n Type: string (or Expression with resultType string). - :type additional_headers: any - :param format: The format of files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used on files. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + :vartype additional_headers: any + :ivar format: The format of files. + :vartype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :ivar compression: The data compression method used on files. + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -21537,6 +28918,47 @@ def __init__( compression: Optional["DatasetCompression"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword relative_url: The relative URL based on the URL in the HttpLinkedService refers to an + HTTP file Type: string (or Expression with resultType string). + :paramtype relative_url: any + :keyword request_method: The HTTP method for the HTTP request. Type: string (or Expression with + resultType string). + :paramtype request_method: any + :keyword request_body: The body for the HTTP request. Type: string (or Expression with + resultType string). + :paramtype request_body: any + :keyword additional_headers: The headers for the HTTP Request. e.g. + request-header-name-1:request-header-value-1 + ... + request-header-name-n:request-header-value-n Type: string (or Expression with resultType + string). + :paramtype additional_headers: any + :keyword format: The format of files. + :paramtype format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :keyword compression: The data compression method used on files. + :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ super(HttpDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'HttpFile' # type: str self.relative_url = relative_url @@ -21552,51 +28974,51 @@ class HttpLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: string (or Expression with resultType string). - :type url: any - :param authentication_type: The authentication type to be used to connect to the HTTP server. + :vartype url: any + :ivar authentication_type: The authentication type to be used to connect to the HTTP server. Possible values include: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". 
- :type authentication_type: str or ~azure.mgmt.datafactory.models.HttpAuthenticationType - :param user_name: User name for Basic, Digest, or Windows authentication. Type: string (or + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.HttpAuthenticationType + :ivar user_name: User name for Basic, Digest, or Windows authentication. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password for Basic, Digest, Windows, or ClientCertificate with - EmbeddedCertData authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param auth_headers: The additional HTTP headers in the request to RESTful API used for + :vartype user_name: any + :ivar password: Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData + authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). - :type auth_headers: any - :param embedded_cert_data: Base64 encoded certificate data for ClientCertificate - authentication. For on-premises copy with ClientCertificate authentication, either - CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type embedded_cert_data: any - :param cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only + :vartype auth_headers: any + :ivar embedded_cert_data: Base64 encoded certificate data for ClientCertificate authentication. + For on-premises copy with ClientCertificate authentication, either CertThumbprint or + EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType + string). + :vartype embedded_cert_data: any + :ivar cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only valid for on-premises copy. 
For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). - :type cert_thumbprint: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype cert_thumbprint: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param enable_server_certificate_validation: If true, validate the HTTPS server SSL - certificate. Default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: any + :vartype encrypted_credential: any + :ivar enable_server_certificate_validation: If true, validate the HTTPS server SSL certificate. + Default value is true. Type: boolean (or Expression with resultType boolean). + :vartype enable_server_certificate_validation: any """ _validation = { @@ -21641,6 +29063,51 @@ def __init__( enable_server_certificate_validation: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. 
Type: + string (or Expression with resultType string). + :paramtype url: any + :keyword authentication_type: The authentication type to be used to connect to the HTTP server. + Possible values include: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.HttpAuthenticationType + :keyword user_name: User name for Basic, Digest, or Windows authentication. Type: string (or + Expression with resultType string). + :paramtype user_name: any + :keyword password: Password for Basic, Digest, Windows, or ClientCertificate with + EmbeddedCertData authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword auth_headers: The additional HTTP headers in the request to RESTful API used for + authorization. Type: object (or Expression with resultType object). + :paramtype auth_headers: any + :keyword embedded_cert_data: Base64 encoded certificate data for ClientCertificate + authentication. For on-premises copy with ClientCertificate authentication, either + CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :paramtype embedded_cert_data: any + :keyword cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only + valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either + CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :paramtype cert_thumbprint: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword enable_server_certificate_validation: If true, validate the HTTPS server SSL + certificate. Default value is true. 
Type: boolean (or Expression with resultType boolean). + :paramtype enable_server_certificate_validation: any + """ super(HttpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'HttpServer' # type: str self.url = url @@ -21659,34 +29126,34 @@ class HttpReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: + :vartype disable_metrics_collection: any + :ivar request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). 
- :type request_method: any - :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + :vartype request_method: any + :ivar request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). - :type request_body: any - :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + :vartype request_body: any + :ivar additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: any - :param request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP + :vartype additional_headers: any + :ivar request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. - :type request_timeout: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype request_timeout: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any + :vartype partition_root_path: any """ _validation = { @@ -21720,6 +29187,34 @@ def __init__( partition_root_path: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword request_method: The HTTP method used to call the RESTful API. The default is GET. + Type: string (or Expression with resultType string). + :paramtype request_method: any + :keyword request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + string (or Expression with resultType string). + :paramtype request_body: any + :keyword additional_headers: The additional HTTP headers in the request to the RESTful API. + Type: string (or Expression with resultType string). + :paramtype additional_headers: any + :keyword request_timeout: Specifies the timeout for a HTTP client to get HTTP response from + HTTP server. + :paramtype request_timeout: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + """ super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HttpReadSettings' # type: str self.request_method = request_method @@ -21735,20 +29230,20 @@ class HttpServerLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. 
Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any - :param relative_url: Specify the relativeUrl of http server. Type: string (or Expression with + :vartype file_name: any + :ivar relative_url: Specify the relativeUrl of http server. Type: string (or Expression with resultType string). - :type relative_url: any + :vartype relative_url: any """ _validation = { @@ -21772,6 +29267,20 @@ def __init__( relative_url: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + :keyword relative_url: Specify the relativeUrl of http server. Type: string (or Expression with + resultType string). 
+ :paramtype relative_url: any + """ super(HttpServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.type = 'HttpServerLocation' # type: str self.relative_url = relative_url @@ -21782,28 +29291,28 @@ class HttpSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from + :vartype disable_metrics_collection: any + :ivar http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: any + :vartype http_request_timeout: any """ _validation = { @@ -21831,6 +29340,28 @@ def __init__( http_request_timeout: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response + from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: + string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype http_request_timeout: any + """ super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HttpSource' # type: str self.http_request_timeout = http_request_timeout @@ -21841,43 +29372,43 @@ class HubspotLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param client_id: Required. The client ID associated with your Hubspot application. - :type client_id: any - :param client_secret: The client secret associated with your Hubspot application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param access_token: The access token obtained when initially authenticating your OAuth + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. 
+ :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar client_id: Required. The client ID associated with your Hubspot application. + :vartype client_id: any + :ivar client_secret: The client secret associated with your Hubspot application. + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar access_token: The access token obtained when initially authenticating your OAuth integration. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param refresh_token: The refresh token obtained when initially authenticating your OAuth + :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar refresh_token: The refresh token obtained when initially authenticating your OAuth integration. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
- :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -21920,6 +29451,43 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword client_id: Required. The client ID associated with your Hubspot application. + :paramtype client_id: any + :keyword client_secret: The client secret associated with your Hubspot application. + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword access_token: The access token obtained when initially authenticating your OAuth + integration. 
+ :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword refresh_token: The refresh token obtained when initially authenticating your OAuth + integration. + :paramtype refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Hubspot' # type: str self.client_id = client_id @@ -21937,30 +29505,30 @@ class HubspotObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -21995,6 +29563,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). 
+ :paramtype table_name: any + """ super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'HubspotObject' # type: str self.table_name = table_name @@ -22005,32 +29597,32 @@ class HubspotSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -22062,6 +29654,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'HubspotSource' # type: str self.query = query @@ -22072,29 +29690,29 @@ class IfConditionActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. 
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param expression: Required. An expression that would evaluate to Boolean. This is used to + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar expression: Required. An expression that would evaluate to Boolean. This is used to determine the block of activities (ifTrueActivities or ifFalseActivities) that will be executed. - :type expression: ~azure.mgmt.datafactory.models.Expression - :param if_true_activities: List of activities to execute if expression is evaluated to true. + :vartype expression: ~azure.mgmt.datafactory.models.Expression + :ivar if_true_activities: List of activities to execute if expression is evaluated to true. This is an optional property and if not provided, the activity will exit without any action. - :type if_true_activities: list[~azure.mgmt.datafactory.models.Activity] - :param if_false_activities: List of activities to execute if expression is evaluated to false. + :vartype if_true_activities: list[~azure.mgmt.datafactory.models.Activity] + :ivar if_false_activities: List of activities to execute if expression is evaluated to false. This is an optional property and if not provided, the activity will exit without any action. 
- :type if_false_activities: list[~azure.mgmt.datafactory.models.Activity] + :vartype if_false_activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { @@ -22128,6 +29746,30 @@ def __init__( if_false_activities: Optional[List["Activity"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword expression: Required. An expression that would evaluate to Boolean. This is used to + determine the block of activities (ifTrueActivities or ifFalseActivities) that will be + executed. + :paramtype expression: ~azure.mgmt.datafactory.models.Expression + :keyword if_true_activities: List of activities to execute if expression is evaluated to true. + This is an optional property and if not provided, the activity will exit without any action. + :paramtype if_true_activities: list[~azure.mgmt.datafactory.models.Activity] + :keyword if_false_activities: List of activities to execute if expression is evaluated to + false. This is an optional property and if not provided, the activity will exit without any + action. 
+ :paramtype if_false_activities: list[~azure.mgmt.datafactory.models.Activity] + """ super(IfConditionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.type = 'IfCondition' # type: str self.expression = expression @@ -22140,53 +29782,52 @@ class ImpalaLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The IP address or host name of the Impala server. (i.e. - 192.168.222.160). - :type host: any - :param port: The TCP port that the Impala server uses to listen for client connections. The + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The IP address or host name of the Impala server. (i.e. 192.168.222.160). + :vartype host: any + :ivar port: The TCP port that the Impala server uses to listen for client connections. The default value is 21050. - :type port: any - :param authentication_type: Required. The authentication type to use. Possible values include: + :vartype port: any + :ivar authentication_type: Required. The authentication type to use. Possible values include: "Anonymous", "SASLUsername", "UsernameAndPassword". - :type authentication_type: str or ~azure.mgmt.datafactory.models.ImpalaAuthenticationType - :param username: The user name used to access the Impala server. The default value is anonymous + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.ImpalaAuthenticationType + :ivar username: The user name used to access the Impala server. The default value is anonymous when using SASLUsername. - :type username: any - :param password: The password corresponding to the user name when using UsernameAndPassword. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + :vartype username: any + :ivar password: The password corresponding to the user name when using UsernameAndPassword. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. 
- :type enable_ssl: any - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + :vartype enable_ssl: any + :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: any - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + :vartype trusted_cert_path: any + :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: any - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + :vartype use_system_trust_store: any + :ivar allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: any - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + :vartype allow_host_name_cn_mismatch: any + :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype allow_self_signed_server_cert: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -22236,6 +29877,53 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The IP address or host name of the Impala server. (i.e. + 192.168.222.160). + :paramtype host: any + :keyword port: The TCP port that the Impala server uses to listen for client connections. The + default value is 21050. + :paramtype port: any + :keyword authentication_type: Required. The authentication type to use. Possible values + include: "Anonymous", "SASLUsername", "UsernameAndPassword". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.ImpalaAuthenticationType + :keyword username: The user name used to access the Impala server. The default value is + anonymous when using SASLUsername. + :paramtype username: any + :keyword password: The password corresponding to the user name when using UsernameAndPassword. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. + The default value is false. + :paramtype enable_ssl: any + :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates + for verifying the server when connecting over SSL. 
This property can only be set when using SSL + on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :paramtype trusted_cert_path: any + :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system + trust store or from a specified PEM file. The default value is false. + :paramtype use_system_trust_store: any + :keyword allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :paramtype allow_host_name_cn_mismatch: any + :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates + from the server. The default value is false. + :paramtype allow_self_signed_server_cert: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Impala' # type: str self.host = host @@ -22256,37 +29944,36 @@ class ImpalaObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. 
Please consider using schema + table properties instead. - :type table_name: any - :param table: The table name of the Impala. Type: string (or Expression with resultType - string). - :type table: any - :param schema_type_properties_schema: The schema name of the Impala. Type: string (or - Expression with resultType string). - :type schema_type_properties_schema: any + :vartype table_name: any + :ivar table: The table name of the Impala. Type: string (or Expression with resultType string). + :vartype table: any + :ivar schema_type_properties_schema: The schema name of the Impala. Type: string (or Expression + with resultType string). + :vartype schema_type_properties_schema: any """ _validation = { @@ -22325,6 +30012,37 @@ def __init__( schema_type_properties_schema: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of the Impala. Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of the Impala. Type: string (or + Expression with resultType string). + :paramtype schema_type_properties_schema: any + """ super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'ImpalaObject' # type: str self.table_name = table_name @@ -22337,32 +30055,32 @@ class ImpalaSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -22394,6 +30112,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. 
Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ImpalaSource' # type: str self.query = query @@ -22404,39 +30148,39 @@ class InformixLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The non-access credential portion of the connection string + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param authentication_type: Type of authentication used to connect to the Informix as ODBC data + :vartype connection_string: any + :ivar authentication_type: Type of authentication used to connect to the Informix as ODBC data store. 
Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :type authentication_type: any - :param credential: The access credential portion of the connection string specified in + :vartype authentication_type: any + :ivar credential: The access credential portion of the connection string specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with + :vartype credential: ~azure.mgmt.datafactory.models.SecretBase + :ivar user_name: User name for Basic authentication. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype user_name: any + :ivar password: Password for Basic authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -22475,6 +30219,39 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The non-access credential portion of the connection + string as well as an optional encrypted credential. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword authentication_type: Type of authentication used to connect to the Informix as ODBC + data store. Possible values are: Anonymous and Basic. Type: string (or Expression with + resultType string). + :paramtype authentication_type: any + :keyword credential: The access credential portion of the connection string specified in + driver-specific property-value format. + :paramtype credential: ~azure.mgmt.datafactory.models.SecretBase + :keyword user_name: User name for Basic authentication. Type: string (or Expression with + resultType string). + :paramtype user_name: any + :keyword password: Password for Basic authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Informix' # type: str self.connection_string = connection_string @@ -22490,32 +30267,32 @@ class InformixSink(CopySink): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param pre_copy_script: A query to execute before starting the copy. Type: string (or - Expression with resultType string). - :type pre_copy_script: any + :vartype disable_metrics_collection: any + :ivar pre_copy_script: A query to execute before starting the copy. Type: string (or Expression + with resultType string). + :vartype pre_copy_script: any """ _validation = { @@ -22547,6 +30324,32 @@ def __init__( pre_copy_script: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. 
Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). + :paramtype pre_copy_script: any + """ super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'InformixSink' # type: str self.pre_copy_script = pre_copy_script @@ -22557,31 +30360,31 @@ class InformixSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any + :vartype additional_columns: any + :ivar query: Database query. Type: string (or Expression with resultType string). + :vartype query: any """ _validation = { @@ -22613,6 +30416,31 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: any + """ super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'InformixSource' # type: str self.query = query @@ -22623,31 +30451,30 @@ class InformixTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. 
Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The Informix table name. Type: string (or Expression with resultType - string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The Informix table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -22682,6 +30509,31 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The Informix table name. Type: string (or Expression with resultType + string). 
+ :paramtype table_name: any + """ super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'InformixTable' # type: str self.table_name = table_name @@ -22695,14 +30547,14 @@ class IntegrationRuntime(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of integration runtime.Constant filled by server. Possible values + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType - :param description: Integration runtime description. - :type description: str + :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType + :ivar description: Integration runtime description. + :vartype description: str """ _validation = { @@ -22726,6 +30578,13 @@ def __init__( description: Optional[str] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Integration runtime description. 
+ :paramtype description: str + """ super(IntegrationRuntime, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'IntegrationRuntime' # type: str @@ -22735,10 +30594,10 @@ def __init__( class IntegrationRuntimeAuthKeys(msrest.serialization.Model): """The integration runtime authentication keys. - :param auth_key1: The primary integration runtime authentication key. - :type auth_key1: str - :param auth_key2: The secondary integration runtime authentication key. - :type auth_key2: str + :ivar auth_key1: The primary integration runtime authentication key. + :vartype auth_key1: str + :ivar auth_key2: The secondary integration runtime authentication key. + :vartype auth_key2: str """ _attribute_map = { @@ -22753,6 +30612,12 @@ def __init__( auth_key2: Optional[str] = None, **kwargs ): + """ + :keyword auth_key1: The primary integration runtime authentication key. + :paramtype auth_key1: str + :keyword auth_key2: The secondary integration runtime authentication key. + :paramtype auth_key2: str + """ super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs) self.auth_key1 = auth_key1 self.auth_key2 = auth_key2 @@ -22761,24 +30626,25 @@ def __init__( class IntegrationRuntimeComputeProperties(msrest.serialization.Model): """The compute resource properties for managed integration runtime. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param location: The location for managed integration runtime. The supported regions could be + :vartype additional_properties: dict[str, any] + :ivar location: The location for managed integration runtime. The supported regions could be found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities. 
- :type location: str - :param node_size: The node size requirement to managed integration runtime. - :type node_size: str - :param number_of_nodes: The required number of nodes for managed integration runtime. - :type number_of_nodes: int - :param max_parallel_executions_per_node: Maximum parallel executions count per node for managed + :vartype location: str + :ivar node_size: The node size requirement to managed integration runtime. + :vartype node_size: str + :ivar number_of_nodes: The required number of nodes for managed integration runtime. + :vartype number_of_nodes: int + :ivar max_parallel_executions_per_node: Maximum parallel executions count per node for managed integration runtime. - :type max_parallel_executions_per_node: int - :param data_flow_properties: Data flow properties for managed integration runtime. - :type data_flow_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeDataFlowProperties - :param v_net_properties: VNet properties for managed integration runtime. - :type v_net_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties + :vartype max_parallel_executions_per_node: int + :ivar data_flow_properties: Data flow properties for managed integration runtime. + :vartype data_flow_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataFlowProperties + :ivar v_net_properties: VNet properties for managed integration runtime. + :vartype v_net_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties """ _validation = { @@ -22808,6 +30674,27 @@ def __init__( v_net_properties: Optional["IntegrationRuntimeVNetProperties"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword location: The location for managed integration runtime. 
The supported regions could be + found on + https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities. + :paramtype location: str + :keyword node_size: The node size requirement to managed integration runtime. + :paramtype node_size: str + :keyword number_of_nodes: The required number of nodes for managed integration runtime. + :paramtype number_of_nodes: int + :keyword max_parallel_executions_per_node: Maximum parallel executions count per node for + managed integration runtime. + :paramtype max_parallel_executions_per_node: int + :keyword data_flow_properties: Data flow properties for managed integration runtime. + :paramtype data_flow_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataFlowProperties + :keyword v_net_properties: VNet properties for managed integration runtime. + :paramtype v_net_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties + """ super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) self.additional_properties = additional_properties self.location = location @@ -22823,9 +30710,9 @@ class IntegrationRuntimeConnectionInfo(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar service_token: The token generated in service. Callers use this token to authenticate to integration runtime. :vartype service_token: str @@ -22868,6 +30755,11 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + """ super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) self.additional_properties = additional_properties self.service_token = None @@ -22881,8 +30773,8 @@ def __init__( class IntegrationRuntimeCustomerVirtualNetwork(msrest.serialization.Model): """The definition and properties of virtual network to which Azure-SSIS integration runtime will join. - :param subnet_id: The ID of subnet to which Azure-SSIS integration runtime will join. - :type subnet_id: str + :ivar subnet_id: The ID of subnet to which Azure-SSIS integration runtime will join. + :vartype subnet_id: str """ _attribute_map = { @@ -22895,6 +30787,10 @@ def __init__( subnet_id: Optional[str] = None, **kwargs ): + """ + :keyword subnet_id: The ID of subnet to which Azure-SSIS integration runtime will join. + :paramtype subnet_id: str + """ super(IntegrationRuntimeCustomerVirtualNetwork, self).__init__(**kwargs) self.subnet_id = subnet_id @@ -22902,11 +30798,11 @@ def __init__( class IntegrationRuntimeCustomSetupScriptProperties(msrest.serialization.Model): """Custom setup script properties for a managed dedicated integration runtime. - :param blob_container_uri: The URI of the Azure blob container that contains the custom setup + :ivar blob_container_uri: The URI of the Azure blob container that contains the custom setup script. - :type blob_container_uri: str - :param sas_token: The SAS token of the Azure blob container. - :type sas_token: ~azure.mgmt.datafactory.models.SecureString + :vartype blob_container_uri: str + :ivar sas_token: The SAS token of the Azure blob container. + :vartype sas_token: ~azure.mgmt.datafactory.models.SecureString """ _attribute_map = { @@ -22921,6 +30817,13 @@ def __init__( sas_token: Optional["SecureString"] = None, **kwargs ): + """ + :keyword blob_container_uri: The URI of the Azure blob container that contains the custom setup + script. 
+ :paramtype blob_container_uri: str + :keyword sas_token: The SAS token of the Azure blob container. + :paramtype sas_token: ~azure.mgmt.datafactory.models.SecureString + """ super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) self.blob_container_uri = blob_container_uri self.sas_token = sas_token @@ -22929,21 +30832,21 @@ def __init__( class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): """Data flow properties for managed integration runtime. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param compute_type: Compute type of the cluster which will execute data flow job. Possible + :vartype additional_properties: dict[str, any] + :ivar compute_type: Compute type of the cluster which will execute data flow job. Possible values include: "General", "MemoryOptimized", "ComputeOptimized". - :type compute_type: str or ~azure.mgmt.datafactory.models.DataFlowComputeType - :param core_count: Core count of the cluster which will execute data flow job. Supported values + :vartype compute_type: str or ~azure.mgmt.datafactory.models.DataFlowComputeType + :ivar core_count: Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. - :type core_count: int - :param time_to_live: Time to live (in minutes) setting of the cluster which will execute data + :vartype core_count: int + :ivar time_to_live: Time to live (in minutes) setting of the cluster which will execute data flow job. 
- :type time_to_live: int - :param cleanup: Cluster will not be recycled and it will be used in next data flow activity run + :vartype time_to_live: int + :ivar cleanup: Cluster will not be recycled and it will be used in next data flow activity run until TTL (time to live) is reached if this is set as false. Default is true. - :type cleanup: bool + :vartype cleanup: bool """ _validation = { @@ -22968,6 +30871,23 @@ def __init__( cleanup: Optional[bool] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword compute_type: Compute type of the cluster which will execute data flow job. Possible + values include: "General", "MemoryOptimized", "ComputeOptimized". + :paramtype compute_type: str or ~azure.mgmt.datafactory.models.DataFlowComputeType + :keyword core_count: Core count of the cluster which will execute data flow job. Supported + values are: 8, 16, 32, 48, 80, 144 and 272. + :paramtype core_count: int + :keyword time_to_live: Time to live (in minutes) setting of the cluster which will execute data + flow job. + :paramtype time_to_live: int + :keyword cleanup: Cluster will not be recycled and it will be used in next data flow activity + run until TTL (time to live) is reached if this is set as false. Default is true. + :paramtype cleanup: bool + """ super(IntegrationRuntimeDataFlowProperties, self).__init__(**kwargs) self.additional_properties = additional_properties self.compute_type = compute_type @@ -22979,12 +30899,12 @@ def __init__( class IntegrationRuntimeDataProxyProperties(msrest.serialization.Model): """Data proxy properties for a managed dedicated integration runtime. - :param connect_via: The self-hosted integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.EntityReference - :param staging_linked_service: The staging linked service reference. 
- :type staging_linked_service: ~azure.mgmt.datafactory.models.EntityReference - :param path: The path to contain the staged data in the Blob storage. - :type path: str + :ivar connect_via: The self-hosted integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.EntityReference + :ivar staging_linked_service: The staging linked service reference. + :vartype staging_linked_service: ~azure.mgmt.datafactory.models.EntityReference + :ivar path: The path to contain the staged data in the Blob storage. + :vartype path: str """ _attribute_map = { @@ -23001,6 +30921,14 @@ def __init__( path: Optional[str] = None, **kwargs ): + """ + :keyword connect_via: The self-hosted integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.EntityReference + :keyword staging_linked_service: The staging linked service reference. + :paramtype staging_linked_service: ~azure.mgmt.datafactory.models.EntityReference + :keyword path: The path to contain the staged data in the Blob storage. + :paramtype path: str + """ super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) self.connect_via = connect_via self.staging_linked_service = staging_linked_service @@ -23012,10 +30940,10 @@ class IntegrationRuntimeDebugResource(SubResourceDebugResource): All required parameters must be populated in order to send to Azure. - :param name: The resource name. - :type name: str - :param properties: Required. Integration runtime properties. - :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime + :ivar name: The resource name. + :vartype name: str + :ivar properties: Required. Integration runtime properties. + :vartype properties: ~azure.mgmt.datafactory.models.IntegrationRuntime """ _validation = { @@ -23034,6 +30962,12 @@ def __init__( name: Optional[str] = None, **kwargs ): + """ + :keyword name: The resource name. + :paramtype name: str + :keyword properties: Required. Integration runtime properties. 
+ :paramtype properties: ~azure.mgmt.datafactory.models.IntegrationRuntime + """ super(IntegrationRuntimeDebugResource, self).__init__(name=name, **kwargs) self.properties = properties @@ -23043,10 +30977,10 @@ class IntegrationRuntimeListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of integration runtimes. - :type value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of integration runtimes. + :vartype value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -23065,6 +30999,12 @@ def __init__( next_link: Optional[str] = None, **kwargs ): + """ + :keyword value: Required. List of integration runtimes. + :paramtype value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(IntegrationRuntimeListResponse, self).__init__(**kwargs) self.value = value self.next_link = next_link @@ -23073,10 +31013,10 @@ def __init__( class IntegrationRuntimeMonitoringData(msrest.serialization.Model): """Get monitoring data response. - :param name: Integration runtime name. - :type name: str - :param nodes: Integration runtime node monitoring data. - :type nodes: list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] + :ivar name: Integration runtime name. + :vartype name: str + :ivar nodes: Integration runtime node monitoring data. 
+ :vartype nodes: list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] """ _attribute_map = { @@ -23091,6 +31031,12 @@ def __init__( nodes: Optional[List["IntegrationRuntimeNodeMonitoringData"]] = None, **kwargs ): + """ + :keyword name: Integration runtime name. + :paramtype name: str + :keyword nodes: Integration runtime node monitoring data. + :paramtype nodes: list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] + """ super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) self.name = name self.nodes = nodes @@ -23117,6 +31063,8 @@ def __init__( self, **kwargs ): + """ + """ super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) self.ip_address = None @@ -23126,9 +31074,9 @@ class IntegrationRuntimeNodeMonitoringData(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar node_name: Name of the integration runtime node. :vartype node_name: str :ivar available_memory_in_mb: Available memory (MB) on the integration runtime node. @@ -23177,6 +31125,11 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + """ super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) self.additional_properties = additional_properties self.node_name = None @@ -23192,10 +31145,10 @@ def __init__( class IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint(msrest.serialization.Model): """Azure-SSIS integration runtime outbound network dependency endpoints for one category. - :param category: The category of outbound network dependency. - :type category: str - :param endpoints: The endpoints for outbound network dependency. - :type endpoints: + :ivar category: The category of outbound network dependency. + :vartype category: str + :ivar endpoints: The endpoints for outbound network dependency. + :vartype endpoints: list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint] """ @@ -23211,6 +31164,13 @@ def __init__( endpoints: Optional[List["IntegrationRuntimeOutboundNetworkDependenciesEndpoint"]] = None, **kwargs ): + """ + :keyword category: The category of outbound network dependency. + :paramtype category: str + :keyword endpoints: The endpoints for outbound network dependency. + :paramtype endpoints: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint] + """ super(IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint, self).__init__(**kwargs) self.category = category self.endpoints = endpoints @@ -23219,10 +31179,10 @@ def __init__( class IntegrationRuntimeOutboundNetworkDependenciesEndpoint(msrest.serialization.Model): """The endpoint for Azure-SSIS integration runtime outbound network dependency. - :param domain_name: The domain name of endpoint. - :type domain_name: str - :param endpoint_details: The details of endpoint. - :type endpoint_details: + :ivar domain_name: The domain name of endpoint. + :vartype domain_name: str + :ivar endpoint_details: The details of endpoint. 
+ :vartype endpoint_details: list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails] """ @@ -23238,6 +31198,13 @@ def __init__( endpoint_details: Optional[List["IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails"]] = None, **kwargs ): + """ + :keyword domain_name: The domain name of endpoint. + :paramtype domain_name: str + :keyword endpoint_details: The details of endpoint. + :paramtype endpoint_details: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails] + """ super(IntegrationRuntimeOutboundNetworkDependenciesEndpoint, self).__init__(**kwargs) self.domain_name = domain_name self.endpoint_details = endpoint_details @@ -23246,8 +31213,8 @@ def __init__( class IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(msrest.serialization.Model): """The details of Azure-SSIS integration runtime outbound network dependency endpoint. - :param port: The port of endpoint. - :type port: int + :ivar port: The port of endpoint. + :vartype port: int """ _attribute_map = { @@ -23260,6 +31227,10 @@ def __init__( port: Optional[int] = None, **kwargs ): + """ + :keyword port: The port of endpoint. + :paramtype port: int + """ super(IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails, self).__init__(**kwargs) self.port = port @@ -23267,8 +31238,8 @@ def __init__( class IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse(msrest.serialization.Model): """Azure-SSIS integration runtime outbound network dependency endpoints. - :param value: The list of outbound network dependency endpoints. - :type value: + :ivar value: The list of outbound network dependency endpoints. 
+ :vartype value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint] """ @@ -23282,6 +31253,11 @@ def __init__( value: Optional[List["IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint"]] = None, **kwargs ): + """ + :keyword value: The list of outbound network dependency endpoints. + :paramtype value: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint] + """ super(IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, self).__init__(**kwargs) self.value = value @@ -23295,10 +31271,10 @@ class IntegrationRuntimeReference(msrest.serialization.Model): :ivar type: Type of integration runtime. Has constant value: "IntegrationRuntimeReference". :vartype type: str - :param reference_name: Required. Reference integration runtime name. - :type reference_name: str - :param parameters: Arguments for integration runtime. - :type parameters: dict[str, any] + :ivar reference_name: Required. Reference integration runtime name. + :vartype reference_name: str + :ivar parameters: Arguments for integration runtime. + :vartype parameters: dict[str, any] """ _validation = { @@ -23321,6 +31297,12 @@ def __init__( parameters: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword reference_name: Required. Reference integration runtime name. + :paramtype reference_name: str + :keyword parameters: Arguments for integration runtime. + :paramtype parameters: dict[str, any] + """ super(IntegrationRuntimeReference, self).__init__(**kwargs) self.reference_name = reference_name self.parameters = parameters @@ -23329,9 +31311,9 @@ def __init__( class IntegrationRuntimeRegenerateKeyParameters(msrest.serialization.Model): """Parameters to regenerate the authentication key. - :param key_name: The name of the authentication key to regenerate. Possible values include: + :ivar key_name: The name of the authentication key to regenerate. 
Possible values include: "authKey1", "authKey2". - :type key_name: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName + :vartype key_name: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName """ _attribute_map = { @@ -23344,6 +31326,11 @@ def __init__( key_name: Optional[Union[str, "IntegrationRuntimeAuthKeyName"]] = None, **kwargs ): + """ + :keyword key_name: The name of the authentication key to regenerate. Possible values include: + "authKey1", "authKey2". + :paramtype key_name: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName + """ super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) self.key_name = key_name @@ -23363,8 +31350,8 @@ class IntegrationRuntimeResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Integration runtime properties. - :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime + :ivar properties: Required. Integration runtime properties. + :vartype properties: ~azure.mgmt.datafactory.models.IntegrationRuntime """ _validation = { @@ -23389,6 +31376,10 @@ def __init__( properties: "IntegrationRuntime", **kwargs ): + """ + :keyword properties: Required. Integration runtime properties. + :paramtype properties: ~azure.mgmt.datafactory.models.IntegrationRuntime + """ super(IntegrationRuntimeResource, self).__init__(**kwargs) self.properties = properties @@ -23396,24 +31387,24 @@ def __init__( class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): """Catalog information for managed dedicated integration runtime. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param catalog_server_endpoint: The catalog database server URL. 
- :type catalog_server_endpoint: str - :param catalog_admin_user_name: The administrator user name of catalog database. - :type catalog_admin_user_name: str - :param catalog_admin_password: The password of the administrator user account of the catalog + :vartype additional_properties: dict[str, any] + :ivar catalog_server_endpoint: The catalog database server URL. + :vartype catalog_server_endpoint: str + :ivar catalog_admin_user_name: The administrator user name of catalog database. + :vartype catalog_admin_user_name: str + :ivar catalog_admin_password: The password of the administrator user account of the catalog database. - :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString - :param catalog_pricing_tier: The pricing tier for the catalog database. The valid values could + :vartype catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString + :ivar catalog_pricing_tier: The pricing tier for the catalog database. The valid values could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible values include: "Basic", "Standard", "Premium", "PremiumRS". - :type catalog_pricing_tier: str or + :vartype catalog_pricing_tier: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier - :param dual_standby_pair_name: The dual standby pair name of Azure-SSIS Integration Runtimes to + :ivar dual_standby_pair_name: The dual standby pair name of Azure-SSIS Integration Runtimes to support SSISDB failover. - :type dual_standby_pair_name: str + :vartype dual_standby_pair_name: str """ _validation = { @@ -23440,6 +31431,26 @@ def __init__( dual_standby_pair_name: Optional[str] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword catalog_server_endpoint: The catalog database server URL. 
+ :paramtype catalog_server_endpoint: str + :keyword catalog_admin_user_name: The administrator user name of catalog database. + :paramtype catalog_admin_user_name: str + :keyword catalog_admin_password: The password of the administrator user account of the catalog + database. + :paramtype catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString + :keyword catalog_pricing_tier: The pricing tier for the catalog database. The valid values + could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible + values include: "Basic", "Standard", "Premium", "PremiumRS". + :paramtype catalog_pricing_tier: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier + :keyword dual_standby_pair_name: The dual standby pair name of Azure-SSIS Integration Runtimes + to support SSISDB failover. + :paramtype dual_standby_pair_name: str + """ super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) self.additional_properties = additional_properties self.catalog_server_endpoint = catalog_server_endpoint @@ -23452,32 +31463,31 @@ def __init__( class IntegrationRuntimeSsisProperties(msrest.serialization.Model): """SSIS properties for managed integration runtime. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param catalog_info: Catalog information for managed dedicated integration runtime. - :type catalog_info: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo - :param license_type: License type for bringing your own license scenario. Possible values + :vartype additional_properties: dict[str, any] + :ivar catalog_info: Catalog information for managed dedicated integration runtime. 
+ :vartype catalog_info: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo + :ivar license_type: License type for bringing your own license scenario. Possible values include: "BasePrice", "LicenseIncluded". - :type license_type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType - :param custom_setup_script_properties: Custom setup script properties for a managed dedicated + :vartype license_type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType + :ivar custom_setup_script_properties: Custom setup script properties for a managed dedicated integration runtime. - :type custom_setup_script_properties: + :vartype custom_setup_script_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties - :param data_proxy_properties: Data proxy properties for a managed dedicated integration - runtime. - :type data_proxy_properties: + :ivar data_proxy_properties: Data proxy properties for a managed dedicated integration runtime. + :vartype data_proxy_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties - :param edition: The edition for the SSIS Integration Runtime. Possible values include: + :ivar edition: The edition for the SSIS Integration Runtime. Possible values include: "Standard", "Enterprise". - :type edition: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition - :param express_custom_setup_properties: Custom setup without script properties for a SSIS + :vartype edition: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition + :ivar express_custom_setup_properties: Custom setup without script properties for a SSIS integration runtime. - :type express_custom_setup_properties: list[~azure.mgmt.datafactory.models.CustomSetupBase] - :param package_stores: Package stores for the SSIS Integration Runtime. 
- :type package_stores: list[~azure.mgmt.datafactory.models.PackageStore] - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype express_custom_setup_properties: list[~azure.mgmt.datafactory.models.CustomSetupBase] + :ivar package_stores: Package stores for the SSIS Integration Runtime. + :vartype package_stores: list[~azure.mgmt.datafactory.models.PackageStore] + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _attribute_map = { @@ -23506,6 +31516,35 @@ def __init__( credential: Optional["CredentialReference"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword catalog_info: Catalog information for managed dedicated integration runtime. + :paramtype catalog_info: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo + :keyword license_type: License type for bringing your own license scenario. Possible values + include: "BasePrice", "LicenseIncluded". + :paramtype license_type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType + :keyword custom_setup_script_properties: Custom setup script properties for a managed dedicated + integration runtime. + :paramtype custom_setup_script_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties + :keyword data_proxy_properties: Data proxy properties for a managed dedicated integration + runtime. + :paramtype data_proxy_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties + :keyword edition: The edition for the SSIS Integration Runtime. Possible values include: + "Standard", "Enterprise". 
+ :paramtype edition: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition + :keyword express_custom_setup_properties: Custom setup without script properties for a SSIS + integration runtime. + :paramtype express_custom_setup_properties: + list[~azure.mgmt.datafactory.models.CustomSetupBase] + :keyword package_stores: Package stores for the SSIS Integration Runtime. + :paramtype package_stores: list[~azure.mgmt.datafactory.models.PackageStore] + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) self.additional_properties = additional_properties self.catalog_info = catalog_info @@ -23528,12 +31567,12 @@ class IntegrationRuntimeStatus(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of integration runtime.Constant filled by server. Possible values + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType + :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :ivar data_factory_name: The data factory name which the integration runtime belong to. :vartype data_factory_name: str :ivar state: The state of integration runtime. 
Possible values include: "Initial", "Stopped", @@ -23565,6 +31604,11 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(IntegrationRuntimeStatus, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'IntegrationRuntimeStatus' # type: str @@ -23577,10 +31621,10 @@ class IntegrationRuntimeStatusListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of integration runtime status. - :type value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of integration runtime status. + :vartype value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -23599,6 +31643,12 @@ def __init__( next_link: Optional[str] = None, **kwargs ): + """ + :keyword value: Required. List of integration runtime status. + :paramtype value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs) self.value = value self.next_link = next_link @@ -23613,8 +31663,8 @@ class IntegrationRuntimeStatusResponse(msrest.serialization.Model): :ivar name: The integration runtime name. :vartype name: str - :param properties: Required. Integration runtime properties. 
- :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus + :ivar properties: Required. Integration runtime properties. + :vartype properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus """ _validation = { @@ -23633,6 +31683,10 @@ def __init__( properties: "IntegrationRuntimeStatus", **kwargs ): + """ + :keyword properties: Required. Integration runtime properties. + :paramtype properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus + """ super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) self.name = None self.properties = properties @@ -23641,19 +31695,18 @@ def __init__( class IntegrationRuntimeVNetProperties(msrest.serialization.Model): """VNet properties for managed integration runtime. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param v_net_id: The ID of the VNet that this integration runtime will join. - :type v_net_id: str - :param subnet: The name of the subnet this integration runtime will join. - :type subnet: str - :param public_i_ps: Resource IDs of the public IP addresses that this integration runtime will + :vartype additional_properties: dict[str, any] + :ivar v_net_id: The ID of the VNet that this integration runtime will join. + :vartype v_net_id: str + :ivar subnet: The name of the subnet this integration runtime will join. + :vartype subnet: str + :ivar public_i_ps: Resource IDs of the public IP addresses that this integration runtime will use. - :type public_i_ps: list[str] - :param subnet_id: The ID of subnet, to which this Azure-SSIS integration runtime will be - joined. - :type subnet_id: str + :vartype public_i_ps: list[str] + :ivar subnet_id: The ID of subnet, to which this Azure-SSIS integration runtime will be joined. 
+ :vartype subnet_id: str """ _attribute_map = { @@ -23674,6 +31727,21 @@ def __init__( subnet_id: Optional[str] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword v_net_id: The ID of the VNet that this integration runtime will join. + :paramtype v_net_id: str + :keyword subnet: The name of the subnet this integration runtime will join. + :paramtype subnet: str + :keyword public_i_ps: Resource IDs of the public IP addresses that this integration runtime + will use. + :paramtype public_i_ps: list[str] + :keyword subnet_id: The ID of subnet, to which this Azure-SSIS integration runtime will be + joined. + :paramtype subnet_id: str + """ super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) self.additional_properties = additional_properties self.v_net_id = v_net_id @@ -23687,44 +31755,43 @@ class JiraLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The IP address or host name of the Jira service. (e.g. - jira.example.com). - :type host: any - :param port: The TCP port that the Jira server uses to listen for client connections. 
The + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The IP address or host name of the Jira service. (e.g. jira.example.com). + :vartype host: any + :ivar port: The TCP port that the Jira server uses to listen for client connections. The default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. - :type port: any - :param username: Required. The user name that you use to access Jira Service. - :type username: any - :param password: The password corresponding to the user name that you provided in the username + :vartype port: any + :ivar username: Required. The user name that you use to access Jira Service. + :vartype username: any + :ivar password: The password corresponding to the user name that you provided in the username field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
- :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -23768,6 +31835,44 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The IP address or host name of the Jira service. (e.g. + jira.example.com). 
+ :paramtype host: any + :keyword port: The TCP port that the Jira server uses to listen for client connections. The + default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. + :paramtype port: any + :keyword username: Required. The user name that you use to access Jira Service. + :paramtype username: any + :keyword password: The password corresponding to the user name that you provided in the + username field. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Jira' # type: str self.host = host @@ -23785,30 +31890,30 @@ class JiraObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -23843,6 +31948,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). 
+ :paramtype table_name: any + """ super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'JiraObject' # type: str self.table_name = table_name @@ -23853,32 +31982,32 @@ class JiraSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -23910,6 +32039,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'JiraSource' # type: str self.query = query @@ -23920,38 +32075,38 @@ class JsonDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. 
+ :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the json data storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param encoding_name: The code page name of the preferred encoding. If not specified, the + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar location: The location of the json data storage. 
+ :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation + :ivar encoding_name: The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :type encoding_name: any - :param compression: The data compression method used for the json dataset. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + :vartype encoding_name: any + :ivar compression: The data compression method used for the json dataset. + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -23990,6 +32145,38 @@ def __init__( compression: Optional["DatasetCompression"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword location: The location of the json data storage. + :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation + :keyword encoding_name: The code page name of the preferred encoding. If not specified, the + default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column + of the table in the following link to set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). + :paramtype encoding_name: any + :keyword compression: The data compression method used for the json dataset. + :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ super(JsonDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'Json' # type: str self.location = location @@ -24002,36 +32189,36 @@ class JsonFormat(DatasetStorageFormat): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage format.Constant filled by server. - :type type: str - :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: any - :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: any - :param file_pattern: File pattern of JSON. 
To be more specific, the way of separating a + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage format.Constant filled by server. + :vartype type: str + :ivar serializer: Serializer. Type: string (or Expression with resultType string). + :vartype serializer: any + :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). + :vartype deserializer: any + :ivar file_pattern: File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. - :type file_pattern: any - :param nesting_separator: The character used to separate nesting levels. Default value is '.' + :vartype file_pattern: any + :ivar nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). - :type nesting_separator: any - :param encoding_name: The code page name of the preferred encoding. If not provided, the - default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. - The full list of supported values can be found in the 'Name' column of the table of encodings - in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or + :vartype nesting_separator: any + :ivar encoding_name: The code page name of the preferred encoding. If not provided, the default + value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. The full + list of supported values can be found in the 'Name' column of the table of encodings in the + following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or Expression with resultType string). - :type encoding_name: any - :param json_node_reference: The JSONPath of the JSON array element to be flattened. Example: + :vartype encoding_name: any + :ivar json_node_reference: The JSONPath of the JSON array element to be flattened. 
Example: "$.ArrayPath". Type: string (or Expression with resultType string). - :type json_node_reference: any - :param json_path_definition: The JSONPath definition for each column mapping with a customized + :vartype json_node_reference: any + :ivar json_path_definition: The JSONPath definition for each column mapping with a customized column name to extract data from JSON file. For fields under root object, start with "$"; for fields inside the array chosen by jsonNodeReference property, start from the array element. Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object (or Expression with resultType object). - :type json_path_definition: any + :vartype json_path_definition: any """ _validation = { @@ -24063,6 +32250,36 @@ def __init__( json_path_definition: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword serializer: Serializer. Type: string (or Expression with resultType string). + :paramtype serializer: any + :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). + :paramtype deserializer: any + :keyword file_pattern: File pattern of JSON. To be more specific, the way of separating a + collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. + :paramtype file_pattern: any + :keyword nesting_separator: The character used to separate nesting levels. Default value is '.' + (dot). Type: string (or Expression with resultType string). + :paramtype nesting_separator: any + :keyword encoding_name: The code page name of the preferred encoding. If not provided, the + default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. 
+ The full list of supported values can be found in the 'Name' column of the table of encodings + in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or + Expression with resultType string). + :paramtype encoding_name: any + :keyword json_node_reference: The JSONPath of the JSON array element to be flattened. Example: + "$.ArrayPath". Type: string (or Expression with resultType string). + :paramtype json_node_reference: any + :keyword json_path_definition: The JSONPath definition for each column mapping with a + customized column name to extract data from JSON file. For fields under root object, start with + "$"; for fields inside the array chosen by jsonNodeReference property, start from the array + element. Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object + (or Expression with resultType object). + :paramtype json_path_definition: any + """ super(JsonFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) self.type = 'JsonFormat' # type: str self.file_pattern = file_pattern @@ -24077,13 +32294,13 @@ class JsonReadSettings(FormatReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param compression_properties: Compression settings. - :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar compression_properties: Compression settings. 
+ :vartype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ _validation = { @@ -24103,6 +32320,13 @@ def __init__( compression_properties: Optional["CompressionReadSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword compression_properties: Compression settings. + :paramtype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + """ super(JsonReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'JsonReadSettings' # type: str self.compression_properties = compression_properties @@ -24113,33 +32337,33 @@ class JsonSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Json store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: Json format settings. - :type format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings + :vartype disable_metrics_collection: any + :ivar store_settings: Json store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :ivar format_settings: Json format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings """ _validation = { @@ -24173,6 +32397,33 @@ def __init__( format_settings: Optional["JsonWriteSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: Json store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :keyword format_settings: Json format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings + """ super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'JsonSink' # type: str self.store_settings = store_settings @@ -24184,30 +32435,30 @@ class JsonSource(CopySource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Json store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: Json format settings. 
- :type format_settings: ~azure.mgmt.datafactory.models.JsonReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar store_settings: Json store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar format_settings: Json format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.JsonReadSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -24239,6 +32490,30 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: Json store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword format_settings: Json format settings. 
+ :paramtype format_settings: ~azure.mgmt.datafactory.models.JsonReadSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'JsonSource' # type: str self.store_settings = store_settings @@ -24251,14 +32526,14 @@ class JsonWriteSettings(FormatWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar file_pattern: File pattern of JSON. This setting controls the way a collection of JSON objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. - :type file_pattern: any + :vartype file_pattern: any """ _validation = { @@ -24278,6 +32553,14 @@ def __init__( file_pattern: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword file_pattern: File pattern of JSON. 
This setting controls the way a collection of JSON + objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. + :paramtype file_pattern: any + """ super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'JsonWriteSettings' # type: str self.file_pattern = file_pattern @@ -24322,6 +32605,8 @@ def __init__( self, **kwargs ): + """ + """ super(LinkedIntegrationRuntime, self).__init__(**kwargs) self.name = None self.subscription_id = None @@ -24338,9 +32623,9 @@ class LinkedIntegrationRuntimeType(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param authorization_type: Required. The authorization type for integration runtime + :ivar authorization_type: Required. The authorization type for integration runtime sharing.Constant filled by server. - :type authorization_type: str + :vartype authorization_type: str """ _validation = { @@ -24359,6 +32644,8 @@ def __init__( self, **kwargs ): + """ + """ super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) self.authorization_type = None # type: Optional[str] @@ -24368,11 +32655,11 @@ class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): All required parameters must be populated in order to send to Azure. - :param authorization_type: Required. The authorization type for integration runtime + :ivar authorization_type: Required. The authorization type for integration runtime sharing.Constant filled by server. - :type authorization_type: str - :param key: Required. The key used for authorization. - :type key: ~azure.mgmt.datafactory.models.SecureString + :vartype authorization_type: str + :ivar key: Required. The key used for authorization. + :vartype key: ~azure.mgmt.datafactory.models.SecureString """ _validation = { @@ -24391,6 +32678,10 @@ def __init__( key: "SecureString", **kwargs ): + """ + :keyword key: Required. The key used for authorization. 
+ :paramtype key: ~azure.mgmt.datafactory.models.SecureString + """ super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) self.authorization_type = 'Key' # type: str self.key = key @@ -24401,11 +32692,11 @@ class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): All required parameters must be populated in order to send to Azure. - :param authorization_type: Required. The authorization type for integration runtime + :ivar authorization_type: Required. The authorization type for integration runtime sharing.Constant filled by server. - :type authorization_type: str - :param resource_id: Required. The resource identifier of the integration runtime to be shared. - :type resource_id: str + :vartype authorization_type: str + :ivar resource_id: Required. The resource identifier of the integration runtime to be shared. + :vartype resource_id: str """ _validation = { @@ -24424,6 +32715,11 @@ def __init__( resource_id: str, **kwargs ): + """ + :keyword resource_id: Required. The resource identifier of the integration runtime to be + shared. + :paramtype resource_id: str + """ super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) self.authorization_type = 'RBAC' # type: str self.resource_id = resource_id @@ -24434,8 +32730,8 @@ class LinkedIntegrationRuntimeRequest(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param linked_factory_name: Required. The data factory name for linked integration runtime. - :type linked_factory_name: str + :ivar linked_factory_name: Required. The data factory name for linked integration runtime. + :vartype linked_factory_name: str """ _validation = { @@ -24452,6 +32748,10 @@ def __init__( linked_factory_name: str, **kwargs ): + """ + :keyword linked_factory_name: Required. The data factory name for linked integration runtime. 
+ :paramtype linked_factory_name: str + """ super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs) self.linked_factory_name = linked_factory_name @@ -24461,10 +32761,10 @@ class LinkedServiceDebugResource(SubResourceDebugResource): All required parameters must be populated in order to send to Azure. - :param name: The resource name. - :type name: str - :param properties: Required. Properties of linked service. - :type properties: ~azure.mgmt.datafactory.models.LinkedService + :ivar name: The resource name. + :vartype name: str + :ivar properties: Required. Properties of linked service. + :vartype properties: ~azure.mgmt.datafactory.models.LinkedService """ _validation = { @@ -24483,6 +32783,12 @@ def __init__( name: Optional[str] = None, **kwargs ): + """ + :keyword name: The resource name. + :paramtype name: str + :keyword properties: Required. Properties of linked service. + :paramtype properties: ~azure.mgmt.datafactory.models.LinkedService + """ super(LinkedServiceDebugResource, self).__init__(name=name, **kwargs) self.properties = properties @@ -24492,10 +32798,10 @@ class LinkedServiceListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of linked services. - :type value: list[~azure.mgmt.datafactory.models.LinkedServiceResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of linked services. + :vartype value: list[~azure.mgmt.datafactory.models.LinkedServiceResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -24514,6 +32820,12 @@ def __init__( next_link: Optional[str] = None, **kwargs ): + """ + :keyword value: Required. List of linked services. 
+ :paramtype value: list[~azure.mgmt.datafactory.models.LinkedServiceResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(LinkedServiceListResponse, self).__init__(**kwargs) self.value = value self.next_link = next_link @@ -24528,10 +32840,10 @@ class LinkedServiceReference(msrest.serialization.Model): :ivar type: Linked service reference type. Has constant value: "LinkedServiceReference". :vartype type: str - :param reference_name: Required. Reference LinkedService name. - :type reference_name: str - :param parameters: Arguments for LinkedService. - :type parameters: dict[str, any] + :ivar reference_name: Required. Reference LinkedService name. + :vartype reference_name: str + :ivar parameters: Arguments for LinkedService. + :vartype parameters: dict[str, any] """ _validation = { @@ -24554,6 +32866,12 @@ def __init__( parameters: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword reference_name: Required. Reference LinkedService name. + :paramtype reference_name: str + :keyword parameters: Arguments for LinkedService. + :paramtype parameters: dict[str, any] + """ super(LinkedServiceReference, self).__init__(**kwargs) self.reference_name = reference_name self.parameters = parameters @@ -24574,8 +32892,8 @@ class LinkedServiceResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Properties of linked service. - :type properties: ~azure.mgmt.datafactory.models.LinkedService + :ivar properties: Required. Properties of linked service. + :vartype properties: ~azure.mgmt.datafactory.models.LinkedService """ _validation = { @@ -24600,6 +32918,10 @@ def __init__( properties: "LinkedService", **kwargs ): + """ + :keyword properties: Required. Properties of linked service. 
+ :paramtype properties: ~azure.mgmt.datafactory.models.LinkedService + """ super(LinkedServiceResource, self).__init__(**kwargs) self.properties = properties @@ -24609,11 +32931,11 @@ class LogLocationSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param linked_service_name: Required. Log storage linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing detailed logs of activity execution. Type: string + :ivar linked_service_name: Required. Log storage linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). - :type path: any + :vartype path: any """ _validation = { @@ -24632,6 +32954,13 @@ def __init__( path: Optional[Any] = None, **kwargs ): + """ + :keyword linked_service_name: Required. Log storage linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword path: The path to storage for storing detailed logs of activity execution. Type: + string (or Expression with resultType string). + :paramtype path: any + """ super(LogLocationSettings, self).__init__(**kwargs) self.linked_service_name = linked_service_name self.path = path @@ -24642,14 +32971,14 @@ class LogSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean + :ivar enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean (or Expression with resultType boolean). - :type enable_copy_activity_log: any - :param copy_activity_log_settings: Specifies settings for copy activity log. 
- :type copy_activity_log_settings: ~azure.mgmt.datafactory.models.CopyActivityLogSettings - :param log_location_settings: Required. Log location settings customer needs to provide when + :vartype enable_copy_activity_log: any + :ivar copy_activity_log_settings: Specifies settings for copy activity log. + :vartype copy_activity_log_settings: ~azure.mgmt.datafactory.models.CopyActivityLogSettings + :ivar log_location_settings: Required. Log location settings customer needs to provide when enabling log. - :type log_location_settings: ~azure.mgmt.datafactory.models.LogLocationSettings + :vartype log_location_settings: ~azure.mgmt.datafactory.models.LogLocationSettings """ _validation = { @@ -24670,6 +32999,16 @@ def __init__( copy_activity_log_settings: Optional["CopyActivityLogSettings"] = None, **kwargs ): + """ + :keyword enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean + (or Expression with resultType boolean). + :paramtype enable_copy_activity_log: any + :keyword copy_activity_log_settings: Specifies settings for copy activity log. + :paramtype copy_activity_log_settings: ~azure.mgmt.datafactory.models.CopyActivityLogSettings + :keyword log_location_settings: Required. Log location settings customer needs to provide when + enabling log. + :paramtype log_location_settings: ~azure.mgmt.datafactory.models.LogLocationSettings + """ super(LogSettings, self).__init__(**kwargs) self.enable_copy_activity_log = enable_copy_activity_log self.copy_activity_log_settings = copy_activity_log_settings @@ -24681,20 +33020,20 @@ class LogStorageSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param linked_service_name: Required. 
Log storage linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing detailed logs of activity execution. Type: string + :vartype additional_properties: dict[str, any] + :ivar linked_service_name: Required. Log storage linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). - :type path: any - :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + :vartype path: any + :ivar log_level: Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). - :type log_level: any - :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or + :vartype log_level: any + :ivar enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). - :type enable_reliable_logging: any + :vartype enable_reliable_logging: any """ _validation = { @@ -24719,6 +33058,22 @@ def __init__( enable_reliable_logging: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword linked_service_name: Required. Log storage linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword path: The path to storage for storing detailed logs of activity execution. Type: + string (or Expression with resultType string). + :paramtype path: any + :keyword log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + Expression with resultType string). 
+ :paramtype log_level: any + :keyword enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean + (or Expression with resultType boolean). + :paramtype enable_reliable_logging: any + """ super(LogStorageSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.linked_service_name = linked_service_name @@ -24732,30 +33087,30 @@ class LookupActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param source: Required. Dataset-specific source properties, same as copy activity source. - :type source: ~azure.mgmt.datafactory.models.CopySource - :param dataset: Required. Lookup activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param first_row_only: Whether to return first row or all rows. Default value is true. Type: + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. 
Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar source: Required. Dataset-specific source properties, same as copy activity source. + :vartype source: ~azure.mgmt.datafactory.models.CopySource + :ivar dataset: Required. Lookup activity dataset reference. + :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :ivar first_row_only: Whether to return first row or all rows. Default value is true. Type: boolean (or Expression with resultType boolean). - :type first_row_only: any + :vartype first_row_only: any """ _validation = { @@ -24794,6 +33149,30 @@ def __init__( first_row_only: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. 
+ :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword source: Required. Dataset-specific source properties, same as copy activity source. + :paramtype source: ~azure.mgmt.datafactory.models.CopySource + :keyword dataset: Required. Lookup activity dataset reference. + :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :keyword first_row_only: Whether to return first row or all rows. Default value is true. Type: + boolean (or Expression with resultType boolean). + :paramtype first_row_only: any + """ super(LookupActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'Lookup' # type: str self.source = source @@ -24806,37 +33185,37 @@ class MagentoLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3). - :type host: any - :param access_token: The access token from Magento. 
- :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3). + :vartype host: any + :ivar access_token: The access token from Magento. + :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. 
- :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -24875,6 +33254,37 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3). + :paramtype host: any + :keyword access_token: The access token from Magento. + :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. 
+ :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(MagentoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Magento' # type: str self.host = host @@ -24890,30 +33300,30 @@ class MagentoObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -24948,6 +33358,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. 
+ :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(MagentoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'MagentoObject' # type: str self.table_name = table_name @@ -24958,32 +33392,32 @@ class MagentoSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. 
+ :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. 
Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -25015,6 +33449,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). 
+ :paramtype query: any + """ super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MagentoSource' # type: str self.query = query @@ -25025,17 +33485,17 @@ class ManagedIdentityCredential(Credential): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of credential.Constant filled by server. - :type type: str - :param description: Credential description. - :type description: str - :param annotations: List of tags that can be used for describing the Credential. - :type annotations: list[any] - :param resource_id: The resource id of user assigned managed identity. - :type resource_id: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of credential.Constant filled by server. + :vartype type: str + :ivar description: Credential description. + :vartype description: str + :ivar annotations: List of tags that can be used for describing the Credential. + :vartype annotations: list[any] + :ivar resource_id: The resource id of user assigned managed identity. + :vartype resource_id: str """ _validation = { @@ -25059,6 +33519,17 @@ def __init__( resource_id: Optional[str] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Credential description. 
+ :paramtype description: str + :keyword annotations: List of tags that can be used for describing the Credential. + :paramtype annotations: list[any] + :keyword resource_id: The resource id of user assigned managed identity. + :paramtype resource_id: str + """ super(ManagedIdentityCredential, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.type = 'ManagedIdentity' # type: str self.resource_id = resource_id @@ -25071,27 +33542,27 @@ class ManagedIntegrationRuntime(IntegrationRuntime): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of integration runtime.Constant filled by server. Possible values + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType - :param description: Integration runtime description. - :type description: str + :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType + :ivar description: Integration runtime description. + :vartype description: str :ivar state: Integration runtime state, only valid for managed dedicated integration runtime. Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param managed_virtual_network: Managed Virtual Network reference. 
- :type managed_virtual_network: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkReference - :param compute_properties: The compute resource for managed integration runtime. - :type compute_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties - :param ssis_properties: SSIS properties for managed integration runtime. - :type ssis_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties - :param customer_virtual_network: The name of virtual network to which Azure-SSIS integration + :ivar managed_virtual_network: Managed Virtual Network reference. + :vartype managed_virtual_network: ~azure.mgmt.datafactory.models.ManagedVirtualNetworkReference + :ivar compute_properties: The compute resource for managed integration runtime. + :vartype compute_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties + :ivar ssis_properties: SSIS properties for managed integration runtime. + :vartype ssis_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties + :ivar customer_virtual_network: The name of virtual network to which Azure-SSIS integration runtime will join. - :type customer_virtual_network: + :vartype customer_virtual_network: ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomerVirtualNetwork """ @@ -25122,6 +33593,25 @@ def __init__( customer_virtual_network: Optional["IntegrationRuntimeCustomerVirtualNetwork"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Integration runtime description. + :paramtype description: str + :keyword managed_virtual_network: Managed Virtual Network reference. + :paramtype managed_virtual_network: + ~azure.mgmt.datafactory.models.ManagedVirtualNetworkReference + :keyword compute_properties: The compute resource for managed integration runtime. 
+ :paramtype compute_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties + :keyword ssis_properties: SSIS properties for managed integration runtime. + :paramtype ssis_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties + :keyword customer_virtual_network: The name of virtual network to which Azure-SSIS integration + runtime will join. + :paramtype customer_virtual_network: + ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomerVirtualNetwork + """ super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) self.type = 'Managed' # type: str self.state = None @@ -25136,9 +33626,9 @@ class ManagedIntegrationRuntimeError(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar time: The time when the error occurred. :vartype time: ~datetime.datetime :ivar code: Error code. @@ -25170,6 +33660,11 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) self.additional_properties = additional_properties self.time = None @@ -25183,16 +33678,16 @@ class ManagedIntegrationRuntimeNode(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar node_id: The managed integration runtime node id. :vartype node_id: str :ivar status: The managed integration runtime node status. Possible values include: "Starting", "Available", "Recycling", "Unavailable". :vartype status: str or ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus - :param errors: The errors that occurred on this integration runtime node. - :type errors: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] + :ivar errors: The errors that occurred on this integration runtime node. + :vartype errors: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] """ _validation = { @@ -25214,6 +33709,13 @@ def __init__( errors: Optional[List["ManagedIntegrationRuntimeError"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword errors: The errors that occurred on this integration runtime node. + :paramtype errors: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] + """ super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs) self.additional_properties = additional_properties self.node_id = None @@ -25226,9 +33728,9 @@ class ManagedIntegrationRuntimeOperationResult(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar type: The operation type. Could be start or stop. :vartype type: str :ivar start_time: The start time of the operation. @@ -25268,6 +33770,11 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = None @@ -25285,12 +33792,12 @@ class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of integration runtime.Constant filled by server. Possible values + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType + :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :ivar data_factory_name: The data factory name which the integration runtime belong to. :vartype data_factory_name: str :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped", @@ -25335,6 +33842,11 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + """ super(ManagedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'Managed' # type: str self.create_time = None @@ -25348,20 +33860,20 @@ class ManagedPrivateEndpoint(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param connection_state: The managed private endpoint connection state. - :type connection_state: ~azure.mgmt.datafactory.models.ConnectionStateProperties - :param fqdns: Fully qualified domain names. - :type fqdns: list[str] - :param group_id: The groupId to which the managed private endpoint is created. - :type group_id: str + :vartype additional_properties: dict[str, any] + :ivar connection_state: The managed private endpoint connection state. + :vartype connection_state: ~azure.mgmt.datafactory.models.ConnectionStateProperties + :ivar fqdns: Fully qualified domain names. + :vartype fqdns: list[str] + :ivar group_id: The groupId to which the managed private endpoint is created. + :vartype group_id: str :ivar is_reserved: Denotes whether the managed private endpoint is reserved. :vartype is_reserved: bool - :param private_link_resource_id: The ARM resource ID of the resource to which the managed + :ivar private_link_resource_id: The ARM resource ID of the resource to which the managed private endpoint is created. - :type private_link_resource_id: str + :vartype private_link_resource_id: str :ivar provisioning_state: The managed private endpoint provisioning state. 
:vartype provisioning_state: str """ @@ -25391,6 +33903,20 @@ def __init__( private_link_resource_id: Optional[str] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connection_state: The managed private endpoint connection state. + :paramtype connection_state: ~azure.mgmt.datafactory.models.ConnectionStateProperties + :keyword fqdns: Fully qualified domain names. + :paramtype fqdns: list[str] + :keyword group_id: The groupId to which the managed private endpoint is created. + :paramtype group_id: str + :keyword private_link_resource_id: The ARM resource ID of the resource to which the managed + private endpoint is created. + :paramtype private_link_resource_id: str + """ super(ManagedPrivateEndpoint, self).__init__(**kwargs) self.additional_properties = additional_properties self.connection_state = connection_state @@ -25406,10 +33932,10 @@ class ManagedPrivateEndpointListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of managed private endpoints. - :type value: list[~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of managed private endpoints. + :vartype value: list[~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -25428,6 +33954,12 @@ def __init__( next_link: Optional[str] = None, **kwargs ): + """ + :keyword value: Required. List of managed private endpoints. 
+ :paramtype value: list[~azure.mgmt.datafactory.models.ManagedPrivateEndpointResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(ManagedPrivateEndpointListResponse, self).__init__(**kwargs) self.value = value self.next_link = next_link @@ -25448,8 +33980,8 @@ class ManagedPrivateEndpointResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Managed private endpoint properties. - :type properties: ~azure.mgmt.datafactory.models.ManagedPrivateEndpoint + :ivar properties: Required. Managed private endpoint properties. + :vartype properties: ~azure.mgmt.datafactory.models.ManagedPrivateEndpoint """ _validation = { @@ -25474,6 +34006,10 @@ def __init__( properties: "ManagedPrivateEndpoint", **kwargs ): + """ + :keyword properties: Required. Managed private endpoint properties. + :paramtype properties: ~azure.mgmt.datafactory.models.ManagedPrivateEndpoint + """ super(ManagedPrivateEndpointResource, self).__init__(**kwargs) self.properties = properties @@ -25483,9 +34019,9 @@ class ManagedVirtualNetwork(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar v_net_id: Managed Virtual Network ID. :vartype v_net_id: str :ivar alias: Managed Virtual Network alias. @@ -25509,6 +34045,11 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + """ super(ManagedVirtualNetwork, self).__init__(**kwargs) self.additional_properties = additional_properties self.v_net_id = None @@ -25520,10 +34061,10 @@ class ManagedVirtualNetworkListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of managed Virtual Networks. - :type value: list[~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of managed Virtual Networks. + :vartype value: list[~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -25542,6 +34083,12 @@ def __init__( next_link: Optional[str] = None, **kwargs ): + """ + :keyword value: Required. List of managed Virtual Networks. + :paramtype value: list[~azure.mgmt.datafactory.models.ManagedVirtualNetworkResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(ManagedVirtualNetworkListResponse, self).__init__(**kwargs) self.value = value self.next_link = next_link @@ -25557,8 +34104,8 @@ class ManagedVirtualNetworkReference(msrest.serialization.Model): :ivar type: Managed Virtual Network reference type. Has constant value: "ManagedVirtualNetworkReference". :vartype type: str - :param reference_name: Required. Reference ManagedVirtualNetwork name. - :type reference_name: str + :ivar reference_name: Required. Reference ManagedVirtualNetwork name. + :vartype reference_name: str """ _validation = { @@ -25579,6 +34126,10 @@ def __init__( reference_name: str, **kwargs ): + """ + :keyword reference_name: Required. Reference ManagedVirtualNetwork name. 
+ :paramtype reference_name: str + """ super(ManagedVirtualNetworkReference, self).__init__(**kwargs) self.reference_name = reference_name @@ -25598,8 +34149,8 @@ class ManagedVirtualNetworkResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Managed Virtual Network properties. - :type properties: ~azure.mgmt.datafactory.models.ManagedVirtualNetwork + :ivar properties: Required. Managed Virtual Network properties. + :vartype properties: ~azure.mgmt.datafactory.models.ManagedVirtualNetwork """ _validation = { @@ -25624,6 +34175,10 @@ def __init__( properties: "ManagedVirtualNetwork", **kwargs ): + """ + :keyword properties: Required. Managed Virtual Network properties. + :paramtype properties: ~azure.mgmt.datafactory.models.ManagedVirtualNetwork + """ super(ManagedVirtualNetworkResource, self).__init__(**kwargs) self.properties = properties @@ -25633,25 +34188,25 @@ class MappingDataFlow(DataFlow): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of data flow.Constant filled by server. - :type type: str - :param description: The description of the data flow. - :type description: str - :param annotations: List of tags that can be used for describing the data flow. - :type annotations: list[any] - :param folder: The folder that this data flow is in. If not specified, Data flow will appear at + :ivar type: Required. Type of data flow.Constant filled by server. + :vartype type: str + :ivar description: The description of the data flow. + :vartype description: str + :ivar annotations: List of tags that can be used for describing the data flow. + :vartype annotations: list[any] + :ivar folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder - :param sources: List of sources in data flow. 
- :type sources: list[~azure.mgmt.datafactory.models.DataFlowSource] - :param sinks: List of sinks in data flow. - :type sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] - :param transformations: List of transformations in data flow. - :type transformations: list[~azure.mgmt.datafactory.models.Transformation] - :param script: DataFlow script. - :type script: str - :param script_lines: Data flow script lines. - :type script_lines: list[str] + :vartype folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :ivar sources: List of sources in data flow. + :vartype sources: list[~azure.mgmt.datafactory.models.DataFlowSource] + :ivar sinks: List of sinks in data flow. + :vartype sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] + :ivar transformations: List of transformations in data flow. + :vartype transformations: list[~azure.mgmt.datafactory.models.Transformation] + :ivar script: DataFlow script. + :vartype script: str + :ivar script_lines: Data flow script lines. + :vartype script_lines: list[str] """ _validation = { @@ -25683,6 +34238,25 @@ def __init__( script_lines: Optional[List[str]] = None, **kwargs ): + """ + :keyword description: The description of the data flow. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the data flow. + :paramtype annotations: list[any] + :keyword folder: The folder that this data flow is in. If not specified, Data flow will appear + at the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :keyword sources: List of sources in data flow. + :paramtype sources: list[~azure.mgmt.datafactory.models.DataFlowSource] + :keyword sinks: List of sinks in data flow. + :paramtype sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] + :keyword transformations: List of transformations in data flow. + :paramtype transformations: list[~azure.mgmt.datafactory.models.Transformation] + :keyword script: DataFlow script. 
+ :paramtype script: str + :keyword script_lines: Data flow script lines. + :paramtype script_lines: list[str] + """ super(MappingDataFlow, self).__init__(description=description, annotations=annotations, folder=folder, **kwargs) self.type = 'MappingDataFlow' # type: str self.sources = sources @@ -25697,28 +34271,28 @@ class MariaDBLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: An ODBC connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: An ODBC connection string. 
Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar pwd: The Azure key vault secret reference of password in connection string. + :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -25750,6 +34324,28 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword pwd: The Azure key vault secret reference of password in connection string. 
+ :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'MariaDB' # type: str self.connection_string = connection_string @@ -25762,32 +34358,32 @@ class MariaDBSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -25819,6 +34415,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MariaDBSource' # type: str self.query = query @@ -25829,30 +34451,30 @@ class MariaDBTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -25887,6 +34509,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). 
+ :paramtype table_name: any + """ super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'MariaDBTable' # type: str self.table_name = table_name @@ -25897,39 +34543,39 @@ class MarketoLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param endpoint: Required. The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com). - :type endpoint: any - :param client_id: Required. The client Id of your Marketo service. - :type client_id: any - :param client_secret: The client secret of your Marketo service. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. 
+ :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar endpoint: Required. The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com). + :vartype endpoint: any + :ivar client_id: Required. The client Id of your Marketo service. + :vartype client_id: any + :ivar client_secret: The client secret of your Marketo service. + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -25971,6 +34617,40 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword endpoint: Required. The endpoint of the Marketo server. (i.e. + 123-ABC-321.mktorest.com). + :paramtype endpoint: any + :keyword client_id: Required. The client Id of your Marketo service. + :paramtype client_id: any + :keyword client_secret: The client secret of your Marketo service. + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. 
Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(MarketoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Marketo' # type: str self.endpoint = endpoint @@ -25987,30 +34667,30 @@ class MarketoObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -26045,6 +34725,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'MarketoObject' # type: str self.table_name = table_name @@ -26055,32 +34759,32 @@ class MarketoSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -26112,6 +34816,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MarketoSource' # type: str self.query = query @@ -26120,10 +34850,10 @@ def __init__( class MetadataItem(msrest.serialization.Model): """Specify the name and value of custom metadata item. - :param name: Metadata item key name. 
Type: string (or Expression with resultType string). - :type name: any - :param value: Metadata item value. Type: string (or Expression with resultType string). - :type value: any + :ivar name: Metadata item key name. Type: string (or Expression with resultType string). + :vartype name: any + :ivar value: Metadata item value. Type: string (or Expression with resultType string). + :vartype value: any """ _attribute_map = { @@ -26138,6 +34868,12 @@ def __init__( value: Optional[Any] = None, **kwargs ): + """ + :keyword name: Metadata item key name. Type: string (or Expression with resultType string). + :paramtype name: any + :keyword value: Metadata item value. Type: string (or Expression with resultType string). + :paramtype value: any + """ super(MetadataItem, self).__init__(**kwargs) self.name = name self.value = value @@ -26148,39 +34884,39 @@ class MicrosoftAccessLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The non-access credential portion of the connection string + :vartype additional_properties: dict[str, any] + :ivar type: Required. 
Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param authentication_type: Type of authentication used to connect to the Microsoft Access as + :vartype connection_string: any + :ivar authentication_type: Type of authentication used to connect to the Microsoft Access as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :type authentication_type: any - :param credential: The access credential portion of the connection string specified in + :vartype authentication_type: any + :ivar credential: The access credential portion of the connection string specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with + :vartype credential: ~azure.mgmt.datafactory.models.SecretBase + :ivar user_name: User name for Basic authentication. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are + :vartype user_name: any + :ivar password: Password for Basic authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -26219,6 +34955,39 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The non-access credential portion of the connection + string as well as an optional encrypted credential. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword authentication_type: Type of authentication used to connect to the Microsoft Access as + ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with + resultType string). + :paramtype authentication_type: any + :keyword credential: The access credential portion of the connection string specified in + driver-specific property-value format. + :paramtype credential: ~azure.mgmt.datafactory.models.SecretBase + :keyword user_name: User name for Basic authentication. 
Type: string (or Expression with + resultType string). + :paramtype user_name: any + :keyword password: Password for Basic authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'MicrosoftAccess' # type: str self.connection_string = connection_string @@ -26234,32 +35003,32 @@ class MicrosoftAccessSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param pre_copy_script: A query to execute before starting the copy. Type: string (or - Expression with resultType string). - :type pre_copy_script: any + :vartype disable_metrics_collection: any + :ivar pre_copy_script: A query to execute before starting the copy. Type: string (or Expression + with resultType string). + :vartype pre_copy_script: any """ _validation = { @@ -26291,6 +35060,32 @@ def __init__( pre_copy_script: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. 
+ :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). + :paramtype pre_copy_script: any + """ super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MicrosoftAccessSink' # type: str self.pre_copy_script = pre_copy_script @@ -26301,28 +35096,28 @@ class MicrosoftAccessSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar query: Database query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -26352,6 +35147,28 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :paramtype additional_columns: any + """ super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MicrosoftAccessSource' # type: str self.query = query @@ -26363,31 +35180,31 @@ class MicrosoftAccessTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The Microsoft Access table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype table_name: any """ _validation = { @@ -26422,6 +35239,31 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The Microsoft Access table name. Type: string (or Expression with + resultType string). + :paramtype table_name: any + """ super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'MicrosoftAccessTable' # type: str self.table_name = table_name @@ -26432,31 +35274,31 @@ class MongoDbAtlasCollectionDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param collection: Required. The collection name of the MongoDB Atlas database. Type: string - (or Expression with resultType string). - :type collection: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar collection: Required. The collection name of the MongoDB Atlas database. Type: string (or + Expression with resultType string). 
+ :vartype collection: any """ _validation = { @@ -26492,6 +35334,31 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword collection: Required. The collection name of the MongoDB Atlas database. Type: string + (or Expression with resultType string). 
+ :paramtype collection: any + """ super(MongoDbAtlasCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'MongoDbAtlasCollection' # type: str self.collection = collection @@ -26502,26 +35369,26 @@ class MongoDbAtlasLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The MongoDB Atlas connection string. Type: string, + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The MongoDB Atlas connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param database: Required. The name of the MongoDB Atlas database that you want to access. - Type: string (or Expression with resultType string). - :type database: any + :vartype connection_string: any + :ivar database: Required. The name of the MongoDB Atlas database that you want to access. Type: + string (or Expression with resultType string). + :vartype database: any """ _validation = { @@ -26553,6 +35420,26 @@ def __init__( annotations: Optional[List[Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The MongoDB Atlas connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword database: Required. The name of the MongoDB Atlas database that you want to access. + Type: string (or Expression with resultType string). 
+ :paramtype database: any + """ super(MongoDbAtlasLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'MongoDbAtlas' # type: str self.connection_string = connection_string @@ -26564,33 +35451,33 @@ class MongoDbAtlasSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: Specifies whether the document with same key to be overwritten (upsert) + :vartype disable_metrics_collection: any + :ivar write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). - :type write_behavior: any + :vartype write_behavior: any """ _validation = { @@ -26622,6 +35509,33 @@ def __init__( write_behavior: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: Specifies whether the document with same key to be overwritten + (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or + Expression with resultType string). Type: string (or Expression with resultType string). + :paramtype write_behavior: any + """ super(MongoDbAtlasSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MongoDbAtlasSink' # type: str self.write_behavior = write_behavior @@ -26632,40 +35546,40 @@ class MongoDbAtlasSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. 
Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param filter: Specifies selection filter using query operators. To return all documents in a + :vartype disable_metrics_collection: any + :ivar filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). - :type filter: any - :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties - :param batch_size: Specifies the number of documents to return in each batch of the response + :vartype filter: any + :ivar cursor_methods: Cursor methods for Mongodb query. 
+ :vartype cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :ivar batch_size: Specifies the number of documents to return in each batch of the response from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). - :type batch_size: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype batch_size: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -26701,6 +35615,40 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). + :paramtype filter: any + :keyword cursor_methods: Cursor methods for Mongodb query. + :paramtype cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :keyword batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user + or the application. This property's main purpose is to avoid hit the limitation of response + size. Type: integer (or Expression with resultType integer). + :paramtype batch_size: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(MongoDbAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MongoDbAtlasSource' # type: str self.filter = filter @@ -26715,31 +35663,31 @@ class MongoDbCollectionDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param collection_name: Required. The table name of the MongoDB database. Type: string (or + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar collection_name: Required. The table name of the MongoDB database. Type: string (or Expression with resultType string). - :type collection_name: any + :vartype collection_name: any """ _validation = { @@ -26775,6 +35723,31 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword collection_name: Required. The table name of the MongoDB database. Type: string (or + Expression with resultType string). + :paramtype collection_name: any + """ super(MongoDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'MongoDbCollection' # type: str self.collection_name = collection_name @@ -26783,24 +35756,24 @@ def __init__( class MongoDbCursorMethodsProperties(msrest.serialization.Model): """Cursor methods for Mongodb query. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param project: Specifies the fields to return in the documents that match the query filter. To + :vartype additional_properties: dict[str, any] + :ivar project: Specifies the fields to return in the documents that match the query filter. To return all fields in the matching documents, omit this parameter. Type: string (or Expression with resultType string). - :type project: any - :param sort: Specifies the order in which the query returns matching documents. Type: string - (or Expression with resultType string). Type: string (or Expression with resultType string). - :type sort: any - :param skip: Specifies the how many documents skipped and where MongoDB begins returning + :vartype project: any + :ivar sort: Specifies the order in which the query returns matching documents. Type: string (or + Expression with resultType string). Type: string (or Expression with resultType string). + :vartype sort: any + :ivar skip: Specifies the how many documents skipped and where MongoDB begins returning results. 
This approach may be useful in implementing paginated results. Type: integer (or Expression with resultType integer). - :type skip: any - :param limit: Specifies the maximum number of documents the server returns. limit() is - analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with - resultType integer). - :type limit: any + :vartype skip: any + :ivar limit: Specifies the maximum number of documents the server returns. limit() is analogous + to the LIMIT statement in a SQL database. Type: integer (or Expression with resultType + integer). + :vartype limit: any """ _attribute_map = { @@ -26821,6 +35794,26 @@ def __init__( limit: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword project: Specifies the fields to return in the documents that match the query filter. + To return all fields in the matching documents, omit this parameter. Type: string (or + Expression with resultType string). + :paramtype project: any + :keyword sort: Specifies the order in which the query returns matching documents. Type: string + (or Expression with resultType string). Type: string (or Expression with resultType string). + :paramtype sort: any + :keyword skip: Specifies the how many documents skipped and where MongoDB begins returning + results. This approach may be useful in implementing paginated results. Type: integer (or + Expression with resultType integer). + :paramtype skip: any + :keyword limit: Specifies the maximum number of documents the server returns. limit() is + analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with + resultType integer). 
+ :paramtype limit: any + """ super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) self.additional_properties = additional_properties self.project = project @@ -26834,49 +35827,49 @@ class MongoDbLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param server: Required. The IP address or server name of the MongoDB server. Type: string (or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar server: Required. The IP address or server name of the MongoDB server. Type: string (or Expression with resultType string). 
- :type server: any - :param authentication_type: The authentication type to be used to connect to the MongoDB + :vartype server: any + :ivar authentication_type: The authentication type to be used to connect to the MongoDB database. Possible values include: "Basic", "Anonymous". - :type authentication_type: str or ~azure.mgmt.datafactory.models.MongoDbAuthenticationType - :param database_name: Required. The name of the MongoDB database that you want to access. Type: + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.MongoDbAuthenticationType + :ivar database_name: Required. The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). - :type database_name: any - :param username: Username for authentication. Type: string (or Expression with resultType + :vartype database_name: any + :ivar username: Username for authentication. Type: string (or Expression with resultType string). - :type username: any - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param auth_source: Database to verify the username and password. Type: string (or Expression + :vartype username: any + :ivar password: Password for authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar auth_source: Database to verify the username and password. Type: string (or Expression with resultType string). - :type auth_source: any - :param port: The TCP port number that the MongoDB server uses to listen for client connections. + :vartype auth_source: any + :ivar port: The TCP port number that the MongoDB server uses to listen for client connections. The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: any - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. 
The + :vartype port: any + :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_ssl: any - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + :vartype enable_ssl: any + :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. Type: boolean (or Expression with resultType boolean). - :type allow_self_signed_server_cert: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype allow_self_signed_server_cert: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -26924,6 +35917,51 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword server: Required. The IP address or server name of the MongoDB server. Type: string + (or Expression with resultType string). 
+ :paramtype server: any + :keyword authentication_type: The authentication type to be used to connect to the MongoDB + database. Possible values include: "Basic", "Anonymous". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.MongoDbAuthenticationType + :keyword database_name: Required. The name of the MongoDB database that you want to access. + Type: string (or Expression with resultType string). + :paramtype database_name: any + :keyword username: Username for authentication. Type: string (or Expression with resultType + string). + :paramtype username: any + :keyword password: Password for authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword auth_source: Database to verify the username and password. Type: string (or Expression + with resultType string). + :paramtype auth_source: any + :keyword port: The TCP port number that the MongoDB server uses to listen for client + connections. The default value is 27017. Type: integer (or Expression with resultType integer), + minimum: 0. + :paramtype port: any + :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. + The default value is false. Type: boolean (or Expression with resultType boolean). + :paramtype enable_ssl: any + :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates + from the server. The default value is false. Type: boolean (or Expression with resultType + boolean). + :paramtype allow_self_signed_server_cert: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(MongoDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'MongoDb' # type: str self.server = server @@ -26943,29 +35981,29 @@ class MongoDbSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. 
Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression + :vartype disable_metrics_collection: any + :ivar query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). - :type query: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -26995,6 +36033,29 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: Database query. Should be a SQL-92 query expression. Type: string (or + Expression with resultType string). 
+ :paramtype query: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MongoDbSource' # type: str self.query = query @@ -27006,31 +36067,31 @@ class MongoDbV2CollectionDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param collection: Required. The collection name of the MongoDB database. Type: string (or + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar collection: Required. The collection name of the MongoDB database. Type: string (or Expression with resultType string). - :type collection: any + :vartype collection: any """ _validation = { @@ -27066,6 +36127,31 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. 
Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword collection: Required. The collection name of the MongoDB database. Type: string (or + Expression with resultType string). + :paramtype collection: any + """ super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'MongoDbV2Collection' # type: str self.collection = collection @@ -27076,25 +36162,25 @@ class MongoDbV2LinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The MongoDB connection string. Type: string, SecureString - or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param database: Required. The name of the MongoDB database that you want to access. Type: + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The MongoDB connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. + :vartype connection_string: any + :ivar database: Required. The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). - :type database: any + :vartype database: any """ _validation = { @@ -27126,6 +36212,25 @@ def __init__( annotations: Optional[List[Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The MongoDB connection string. Type: string, SecureString + or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword database: Required. The name of the MongoDB database that you want to access. Type: + string (or Expression with resultType string). + :paramtype database: any + """ super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'MongoDbV2' # type: str self.connection_string = connection_string @@ -27137,33 +36242,33 @@ class MongoDbV2Sink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: Specifies whether the document with same key to be overwritten (upsert) + :vartype disable_metrics_collection: any + :ivar write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). 
- :type write_behavior: any + :vartype write_behavior: any """ _validation = { @@ -27195,6 +36300,33 @@ def __init__( write_behavior: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: Specifies whether the document with same key to be overwritten + (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or + Expression with resultType string). Type: string (or Expression with resultType string). 
+ :paramtype write_behavior: any + """ super(MongoDbV2Sink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MongoDbV2Sink' # type: str self.write_behavior = write_behavior @@ -27205,40 +36337,40 @@ class MongoDbV2Source(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param filter: Specifies selection filter using query operators. To return all documents in a + :vartype disable_metrics_collection: any + :ivar filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). - :type filter: any - :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties - :param batch_size: Specifies the number of documents to return in each batch of the response + :vartype filter: any + :ivar cursor_methods: Cursor methods for Mongodb query. + :vartype cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :ivar batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). - :type batch_size: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype batch_size: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. 
Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -27274,6 +36406,40 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). + :paramtype filter: any + :keyword cursor_methods: Cursor methods for Mongodb query. + :paramtype cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :keyword batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB instance. In most cases, modifying the batch size will not affect the user or the + application. 
This property's main purpose is to avoid hit the limitation of response size. + Type: integer (or Expression with resultType integer). + :paramtype batch_size: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MongoDbV2Source' # type: str self.filter = filter @@ -27288,27 +36454,27 @@ class MySqlLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. - :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. 
- :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -27341,6 +36507,27 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. 
+ :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. + :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'MySql' # type: str self.connection_string = connection_string @@ -27353,31 +36540,31 @@ class MySqlSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any + :vartype additional_columns: any + :ivar query: Database query. Type: string (or Expression with resultType string). 
+ :vartype query: any """ _validation = { @@ -27409,6 +36596,31 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Type: string (or Expression with resultType string). 
+ :paramtype query: any + """ super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MySqlSource' # type: str self.query = query @@ -27419,30 +36631,30 @@ class MySqlTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The MySQL table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The MySQL table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -27477,6 +36689,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The MySQL table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'MySqlTable' # type: str self.table_name = table_name @@ -27487,28 +36723,28 @@ class NetezzaLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: An ODBC connection string. 
Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar pwd: The Azure key vault secret reference of password in connection string. + :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -27540,6 +36776,28 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword pwd: The Azure key vault secret reference of password in connection string. + :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(NetezzaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Netezza' # type: str self.connection_string = connection_string @@ -27550,17 +36808,17 @@ def __init__( class NetezzaPartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for Netezza source partitioning. - :param partition_column_name: The name of the column in integer type that will be used for + :ivar partition_column_name: The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). 
- :type partition_column_name: any - :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + :vartype partition_column_name: any + :ivar partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_upper_bound: any - :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + :vartype partition_upper_bound: any + :ivar partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_lower_bound: any + :vartype partition_lower_bound: any """ _attribute_map = { @@ -27577,6 +36835,19 @@ def __init__( partition_lower_bound: Optional[Any] = None, **kwargs ): + """ + :keyword partition_column_name: The name of the column in integer type that will be used for + proceeding range partitioning. Type: string (or Expression with resultType string). + :paramtype partition_column_name: any + :keyword partition_upper_bound: The maximum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). + :paramtype partition_upper_bound: any + :keyword partition_lower_bound: The minimum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). + :paramtype partition_lower_bound: any + """ super(NetezzaPartitionSettings, self).__init__(**kwargs) self.partition_column_name = partition_column_name self.partition_upper_bound = partition_upper_bound @@ -27588,37 +36859,37 @@ class NetezzaSource(TabularSource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any - :param partition_option: The partition mechanism that will be used for Netezza read in - parallel. Possible values include: "None", "DataSlice", "DynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for Netezza source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.NetezzaPartitionSettings + :vartype query: any + :ivar partition_option: The partition mechanism that will be used for Netezza read in parallel. + Possible values include: "None", "DataSlice", "DynamicRange". + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for Netezza source partitioning. + :vartype partition_settings: ~azure.mgmt.datafactory.models.NetezzaPartitionSettings """ _validation = { @@ -27654,6 +36925,38 @@ def __init__( partition_settings: Optional["NetezzaPartitionSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + :keyword partition_option: The partition mechanism that will be used for Netezza read in + parallel. Possible values include: "None", "DataSlice", "DynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for Netezza source + partitioning. 
+ :paramtype partition_settings: ~azure.mgmt.datafactory.models.NetezzaPartitionSettings + """ super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'NetezzaSource' # type: str self.query = query @@ -27666,37 +36969,37 @@ class NetezzaTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The table name of the Netezza. Type: string (or Expression with resultType + :vartype table_name: any + :ivar table: The table name of the Netezza. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The schema name of the Netezza. Type: string (or + :vartype table: any + :ivar schema_type_properties_schema: The schema name of the Netezza. Type: string (or Expression with resultType string). 
- :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -27735,6 +37038,37 @@ def __init__( schema_type_properties_schema: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of the Netezza. Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of the Netezza. Type: string (or + Expression with resultType string). 
+ :paramtype schema_type_properties_schema: any + """ super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'NetezzaTable' # type: str self.table_name = table_name @@ -27747,66 +37081,66 @@ class ODataLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param url: Required. The URL of the OData service endpoint. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar url: Required. The URL of the OData service endpoint. Type: string (or Expression with resultType string). - :type url: any - :param authentication_type: Type of authentication used to connect to the OData service. + :vartype url: any + :ivar authentication_type: Type of authentication used to connect to the OData service. Possible values include: "Basic", "Anonymous", "Windows", "AadServicePrincipal", "ManagedServiceIdentity". - :type authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType - :param user_name: User name of the OData service. Type: string (or Expression with resultType + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType + :ivar user_name: User name of the OData service. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password of the OData service. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param auth_headers: The additional HTTP headers in the request to RESTful API used for + :vartype user_name: any + :ivar password: Password of the OData service. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). - :type auth_headers: any - :param tenant: Specify the tenant information (domain name or tenant ID) under which your + :vartype auth_headers: any + :ivar tenant: Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). 
- :type tenant: any - :param service_principal_id: Specify the application id of your application registered in Azure + :vartype tenant: any + :ivar service_principal_id: Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + :vartype service_principal_id: any + :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: any - :param aad_resource_id: Specify the resource you are requesting authorization to use Directory. + :vartype azure_cloud_type: any + :ivar aad_resource_id: Specify the resource you are requesting authorization to use Directory. Type: string (or Expression with resultType string). - :type aad_resource_id: any - :param aad_service_principal_credential_type: Specify the credential type (key or cert) is used + :vartype aad_resource_id: any + :ivar aad_service_principal_credential_type: Specify the credential type (key or cert) is used for service principal. Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". - :type aad_service_principal_credential_type: str or + :vartype aad_service_principal_credential_type: str or ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType - :param service_principal_key: Specify the secret of your application registered in Azure Active + :ivar service_principal_key: Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). 
- :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_embedded_cert: Specify the base64 encoded certificate of your + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_principal_embedded_cert: Specify the base64 encoded certificate of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). - :type service_principal_embedded_cert: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_embedded_cert_password: Specify the password of your certificate if + :vartype service_principal_embedded_cert: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_principal_embedded_cert_password: Specify the password of your certificate if your certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with resultType string). - :type service_principal_embedded_cert_password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_principal_embedded_cert_password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -27861,6 +37195,67 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. 
+ :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword url: Required. The URL of the OData service endpoint. Type: string (or Expression with + resultType string). + :paramtype url: any + :keyword authentication_type: Type of authentication used to connect to the OData service. + Possible values include: "Basic", "Anonymous", "Windows", "AadServicePrincipal", + "ManagedServiceIdentity". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType + :keyword user_name: User name of the OData service. Type: string (or Expression with resultType + string). + :paramtype user_name: any + :keyword password: Password of the OData service. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword auth_headers: The additional HTTP headers in the request to RESTful API used for + authorization. Type: object (or Expression with resultType object). + :paramtype auth_headers: any + :keyword tenant: Specify the tenant information (domain name or tenant ID) under which your + application resides. Type: string (or Expression with resultType string). + :paramtype tenant: any + :keyword service_principal_id: Specify the application id of your application registered in + Azure Active Directory. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. + Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is + the data factory regions’ cloud type. Type: string (or Expression with resultType string). 
+ :paramtype azure_cloud_type: any + :keyword aad_resource_id: Specify the resource you are requesting authorization to use + Directory. Type: string (or Expression with resultType string). + :paramtype aad_resource_id: any + :keyword aad_service_principal_credential_type: Specify the credential type (key or cert) is + used for service principal. Possible values include: "ServicePrincipalKey", + "ServicePrincipalCert". + :paramtype aad_service_principal_credential_type: str or + ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType + :keyword service_principal_key: Specify the secret of your application registered in Azure + Active Directory. Type: string (or Expression with resultType string). + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_principal_embedded_cert: Specify the base64 encoded certificate of your + application registered in Azure Active Directory. Type: string (or Expression with resultType + string). + :paramtype service_principal_embedded_cert: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_principal_embedded_cert_password: Specify the password of your certificate if + your certificate has a password and you are using AadServicePrincipal authentication. Type: + string (or Expression with resultType string). + :paramtype service_principal_embedded_cert_password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'OData' # type: str self.url = url @@ -27884,30 +37279,30 @@ class ODataResourceDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. 
- :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param path: The OData resource path. Type: string (or Expression with resultType string). - :type path: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar path: The OData resource path. Type: string (or Expression with resultType string). + :vartype path: any """ _validation = { @@ -27942,6 +37337,30 @@ def __init__( path: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword path: The OData resource path. Type: string (or Expression with resultType string). + :paramtype path: any + """ super(ODataResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'ODataResource' # type: str self.path = path @@ -27952,34 +37371,34 @@ class ODataSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType + :vartype disable_metrics_collection: any + :ivar query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: any - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + :vartype query: any + :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype http_request_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -28011,6 +37430,34 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: OData query. For example, "$top=1". Type: string (or Expression with resultType + string). + :paramtype query: any + :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the + timeout to get a response, not the timeout to read response data. Default value: 00:05:00. + Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype http_request_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :paramtype additional_columns: any + """ super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ODataSource' # type: str self.query = query @@ -28023,38 +37470,38 @@ class OdbcLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The non-access credential portion of the connection string + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. 
+ :vartype annotations: list[any] + :ivar connection_string: Required. The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param authentication_type: Type of authentication used to connect to the ODBC data store. + :vartype connection_string: any + :ivar authentication_type: Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :type authentication_type: any - :param credential: The access credential portion of the connection string specified in + :vartype authentication_type: any + :ivar credential: The access credential portion of the connection string specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with + :vartype credential: ~azure.mgmt.datafactory.models.SecretBase + :ivar user_name: User name for Basic authentication. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype user_name: any + :ivar password: Password for Basic authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -28093,6 +37540,38 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The non-access credential portion of the connection + string as well as an optional encrypted credential. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword authentication_type: Type of authentication used to connect to the ODBC data store. + Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). + :paramtype authentication_type: any + :keyword credential: The access credential portion of the connection string specified in + driver-specific property-value format. + :paramtype credential: ~azure.mgmt.datafactory.models.SecretBase + :keyword user_name: User name for Basic authentication. Type: string (or Expression with + resultType string). + :paramtype user_name: any + :keyword password: Password for Basic authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Odbc' # type: str self.connection_string = connection_string @@ -28108,32 +37587,32 @@ class OdbcSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param pre_copy_script: A query to execute before starting the copy. Type: string (or - Expression with resultType string). - :type pre_copy_script: any + :vartype disable_metrics_collection: any + :ivar pre_copy_script: A query to execute before starting the copy. Type: string (or Expression + with resultType string). + :vartype pre_copy_script: any """ _validation = { @@ -28165,6 +37644,32 @@ def __init__( pre_copy_script: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). + :paramtype pre_copy_script: any + """ super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OdbcSink' # type: str self.pre_copy_script = pre_copy_script @@ -28175,31 +37680,31 @@ class OdbcSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any + :vartype additional_columns: any + :ivar query: Database query. Type: string (or Expression with resultType string). 
+ :vartype query: any """ _validation = { @@ -28231,6 +37736,31 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Type: string (or Expression with resultType string). 
+ :paramtype query: any + """ super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'OdbcSource' # type: str self.query = query @@ -28241,30 +37771,30 @@ class OdbcTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The ODBC table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The ODBC table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -28299,6 +37829,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The ODBC table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(OdbcTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'OdbcTable' # type: str self.table_name = table_name @@ -28309,34 +37863,34 @@ class Office365Dataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: Required. Name of the dataset to extract from Office 365. Type: string (or + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: Required. Name of the dataset to extract from Office 365. Type: string (or Expression with resultType string). - :type table_name: any - :param predicate: A predicate expression that can be used to filter the specific rows to - extract from Office 365. Type: string (or Expression with resultType string). 
- :type predicate: any + :vartype table_name: any + :ivar predicate: A predicate expression that can be used to filter the specific rows to extract + from Office 365. Type: string (or Expression with resultType string). + :vartype predicate: any """ _validation = { @@ -28374,6 +37928,34 @@ def __init__( predicate: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: Required. Name of the dataset to extract from Office 365. Type: string (or + Expression with resultType string). + :paramtype table_name: any + :keyword predicate: A predicate expression that can be used to filter the specific rows to + extract from Office 365. Type: string (or Expression with resultType string). 
+ :paramtype predicate: any + """ super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'Office365Table' # type: str self.table_name = table_name @@ -28385,34 +37967,34 @@ class Office365LinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param office365_tenant_id: Required. Azure tenant ID to which the Office 365 account belongs. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. 
+ :vartype annotations: list[any] + :ivar office365_tenant_id: Required. Azure tenant ID to which the Office 365 account belongs. Type: string (or Expression with resultType string). - :type office365_tenant_id: any - :param service_principal_tenant_id: Required. Specify the tenant information under which your + :vartype office365_tenant_id: any + :ivar service_principal_tenant_id: Required. Specify the tenant information under which your Azure AD web application resides. Type: string (or Expression with resultType string). - :type service_principal_tenant_id: any - :param service_principal_id: Required. Specify the application's client ID. Type: string (or + :vartype service_principal_tenant_id: any + :ivar service_principal_id: Required. Specify the application's client ID. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: Required. Specify the application's key. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_principal_id: any + :ivar service_principal_key: Required. Specify the application's key. + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -28452,6 +38034,34 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword office365_tenant_id: Required. Azure tenant ID to which the Office 365 account + belongs. Type: string (or Expression with resultType string). + :paramtype office365_tenant_id: any + :keyword service_principal_tenant_id: Required. Specify the tenant information under which your + Azure AD web application resides. Type: string (or Expression with resultType string). + :paramtype service_principal_tenant_id: any + :keyword service_principal_id: Required. Specify the application's client ID. Type: string (or + Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: Required. Specify the application's key. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Office365' # type: str self.office365_tenant_id = office365_tenant_id @@ -28466,42 +38076,42 @@ class Office365Source(CopySource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param allowed_groups: The groups containing all the users. Type: array of strings (or + :vartype disable_metrics_collection: any + :ivar allowed_groups: The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). 
- :type allowed_groups: any - :param user_scope_filter_uri: The user scope uri. Type: string (or Expression with resultType + :vartype allowed_groups: any + :ivar user_scope_filter_uri: The user scope uri. Type: string (or Expression with resultType string). - :type user_scope_filter_uri: any - :param date_filter_column: The Column to apply the :code:`` and + :vartype user_scope_filter_uri: any + :ivar date_filter_column: The Column to apply the :code:`` and :code:``. Type: string (or Expression with resultType string). - :type date_filter_column: any - :param start_time: Start time of the requested range for this dataset. Type: string (or + :vartype date_filter_column: any + :ivar start_time: Start time of the requested range for this dataset. Type: string (or Expression with resultType string). - :type start_time: any - :param end_time: End time of the requested range for this dataset. Type: string (or Expression + :vartype start_time: any + :ivar end_time: End time of the requested range for this dataset. Type: string (or Expression with resultType string). - :type end_time: any - :param output_columns: The columns to be read out from the Office 365 table. Type: array of + :vartype end_time: any + :ivar output_columns: The columns to be read out from the Office 365 table. Type: array of objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, { "name": "CreatedDateTime" } ]. - :type output_columns: any + :vartype output_columns: any """ _validation = { @@ -28539,6 +38149,42 @@ def __init__( output_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword allowed_groups: The groups containing all the users. Type: array of strings (or + Expression with resultType array of strings). + :paramtype allowed_groups: any + :keyword user_scope_filter_uri: The user scope uri. Type: string (or Expression with resultType + string). + :paramtype user_scope_filter_uri: any + :keyword date_filter_column: The Column to apply the :code:`` and + :code:``. Type: string (or Expression with resultType string). + :paramtype date_filter_column: any + :keyword start_time: Start time of the requested range for this dataset. Type: string (or + Expression with resultType string). + :paramtype start_time: any + :keyword end_time: End time of the requested range for this dataset. Type: string (or + Expression with resultType string). + :paramtype end_time: any + :keyword output_columns: The columns to be read out from the Office 365 table. Type: array of + objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, { + "name": "CreatedDateTime" } ]. 
+ :paramtype output_columns: any + """ super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'Office365Source' # type: str self.allowed_groups = allowed_groups @@ -28552,14 +38198,14 @@ def __init__( class Operation(msrest.serialization.Model): """Azure Data Factory API operation definition. - :param name: Operation name: {provider}/{resource}/{operation}. - :type name: str - :param origin: The intended executor of the operation. - :type origin: str - :param display: Metadata associated with the operation. - :type display: ~azure.mgmt.datafactory.models.OperationDisplay - :param service_specification: Details about a service operation. - :type service_specification: ~azure.mgmt.datafactory.models.OperationServiceSpecification + :ivar name: Operation name: {provider}/{resource}/{operation}. + :vartype name: str + :ivar origin: The intended executor of the operation. + :vartype origin: str + :ivar display: Metadata associated with the operation. + :vartype display: ~azure.mgmt.datafactory.models.OperationDisplay + :ivar service_specification: Details about a service operation. + :vartype service_specification: ~azure.mgmt.datafactory.models.OperationServiceSpecification """ _attribute_map = { @@ -28578,6 +38224,16 @@ def __init__( service_specification: Optional["OperationServiceSpecification"] = None, **kwargs ): + """ + :keyword name: Operation name: {provider}/{resource}/{operation}. + :paramtype name: str + :keyword origin: The intended executor of the operation. + :paramtype origin: str + :keyword display: Metadata associated with the operation. + :paramtype display: ~azure.mgmt.datafactory.models.OperationDisplay + :keyword service_specification: Details about a service operation. 
+ :paramtype service_specification: ~azure.mgmt.datafactory.models.OperationServiceSpecification + """ super(Operation, self).__init__(**kwargs) self.name = name self.origin = origin @@ -28588,14 +38244,14 @@ def __init__( class OperationDisplay(msrest.serialization.Model): """Metadata associated with the operation. - :param description: The description of the operation. - :type description: str - :param provider: The name of the provider. - :type provider: str - :param resource: The name of the resource type on which the operation is performed. - :type resource: str - :param operation: The type of operation: get, read, delete, etc. - :type operation: str + :ivar description: The description of the operation. + :vartype description: str + :ivar provider: The name of the provider. + :vartype provider: str + :ivar resource: The name of the resource type on which the operation is performed. + :vartype resource: str + :ivar operation: The type of operation: get, read, delete, etc. + :vartype operation: str """ _attribute_map = { @@ -28614,6 +38270,16 @@ def __init__( operation: Optional[str] = None, **kwargs ): + """ + :keyword description: The description of the operation. + :paramtype description: str + :keyword provider: The name of the provider. + :paramtype provider: str + :keyword resource: The name of the resource type on which the operation is performed. + :paramtype resource: str + :keyword operation: The type of operation: get, read, delete, etc. + :paramtype operation: str + """ super(OperationDisplay, self).__init__(**kwargs) self.description = description self.provider = provider @@ -28624,10 +38290,10 @@ def __init__( class OperationListResponse(msrest.serialization.Model): """A list of operations that can be performed by the Data Factory service. - :param value: List of Data Factory operations supported by the Data Factory resource provider. 
- :type value: list[~azure.mgmt.datafactory.models.Operation] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: List of Data Factory operations supported by the Data Factory resource provider. + :vartype value: list[~azure.mgmt.datafactory.models.Operation] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _attribute_map = { @@ -28642,6 +38308,13 @@ def __init__( next_link: Optional[str] = None, **kwargs ): + """ + :keyword value: List of Data Factory operations supported by the Data Factory resource + provider. + :paramtype value: list[~azure.mgmt.datafactory.models.Operation] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(OperationListResponse, self).__init__(**kwargs) self.value = value self.next_link = next_link @@ -28650,12 +38323,12 @@ def __init__( class OperationLogSpecification(msrest.serialization.Model): """Details about an operation related to logs. - :param name: The name of the log category. - :type name: str - :param display_name: Localized display name. - :type display_name: str - :param blob_duration: Blobs created in the customer storage account, per hour. - :type blob_duration: str + :ivar name: The name of the log category. + :vartype name: str + :ivar display_name: Localized display name. + :vartype display_name: str + :ivar blob_duration: Blobs created in the customer storage account, per hour. + :vartype blob_duration: str """ _attribute_map = { @@ -28672,6 +38345,14 @@ def __init__( blob_duration: Optional[str] = None, **kwargs ): + """ + :keyword name: The name of the log category. + :paramtype name: str + :keyword display_name: Localized display name. + :paramtype display_name: str + :keyword blob_duration: Blobs created in the customer storage account, per hour. 
+ :paramtype blob_duration: str + """ super(OperationLogSpecification, self).__init__(**kwargs) self.name = name self.display_name = display_name @@ -28681,10 +38362,10 @@ def __init__( class OperationMetricAvailability(msrest.serialization.Model): """Defines how often data for a metric becomes available. - :param time_grain: The granularity for the metric. - :type time_grain: str - :param blob_duration: Blob created in the customer storage account, per hour. - :type blob_duration: str + :ivar time_grain: The granularity for the metric. + :vartype time_grain: str + :ivar blob_duration: Blob created in the customer storage account, per hour. + :vartype blob_duration: str """ _attribute_map = { @@ -28699,6 +38380,12 @@ def __init__( blob_duration: Optional[str] = None, **kwargs ): + """ + :keyword time_grain: The granularity for the metric. + :paramtype time_grain: str + :keyword blob_duration: Blob created in the customer storage account, per hour. + :paramtype blob_duration: str + """ super(OperationMetricAvailability, self).__init__(**kwargs) self.time_grain = time_grain self.blob_duration = blob_duration @@ -28707,12 +38394,12 @@ def __init__( class OperationMetricDimension(msrest.serialization.Model): """Defines the metric dimension. - :param name: The name of the dimension for the metric. - :type name: str - :param display_name: The display name of the metric dimension. - :type display_name: str - :param to_be_exported_for_shoebox: Whether the dimension should be exported to Azure Monitor. - :type to_be_exported_for_shoebox: bool + :ivar name: The name of the dimension for the metric. + :vartype name: str + :ivar display_name: The display name of the metric dimension. + :vartype display_name: str + :ivar to_be_exported_for_shoebox: Whether the dimension should be exported to Azure Monitor. 
+ :vartype to_be_exported_for_shoebox: bool """ _attribute_map = { @@ -28729,6 +38416,14 @@ def __init__( to_be_exported_for_shoebox: Optional[bool] = None, **kwargs ): + """ + :keyword name: The name of the dimension for the metric. + :paramtype name: str + :keyword display_name: The display name of the metric dimension. + :paramtype display_name: str + :keyword to_be_exported_for_shoebox: Whether the dimension should be exported to Azure Monitor. + :paramtype to_be_exported_for_shoebox: bool + """ super(OperationMetricDimension, self).__init__(**kwargs) self.name = name self.display_name = display_name @@ -28738,26 +38433,26 @@ def __init__( class OperationMetricSpecification(msrest.serialization.Model): """Details about an operation related to metrics. - :param name: The name of the metric. - :type name: str - :param display_name: Localized display name of the metric. - :type display_name: str - :param display_description: The description of the metric. - :type display_description: str - :param unit: The unit that the metric is measured in. - :type unit: str - :param aggregation_type: The type of metric aggregation. - :type aggregation_type: str - :param enable_regional_mdm_account: Whether or not the service is using regional MDM accounts. - :type enable_regional_mdm_account: str - :param source_mdm_account: The name of the MDM account. - :type source_mdm_account: str - :param source_mdm_namespace: The name of the MDM namespace. - :type source_mdm_namespace: str - :param availabilities: Defines how often data for metrics becomes available. - :type availabilities: list[~azure.mgmt.datafactory.models.OperationMetricAvailability] - :param dimensions: Defines the metric dimension. - :type dimensions: list[~azure.mgmt.datafactory.models.OperationMetricDimension] + :ivar name: The name of the metric. + :vartype name: str + :ivar display_name: Localized display name of the metric. + :vartype display_name: str + :ivar display_description: The description of the metric. 
+ :vartype display_description: str + :ivar unit: The unit that the metric is measured in. + :vartype unit: str + :ivar aggregation_type: The type of metric aggregation. + :vartype aggregation_type: str + :ivar enable_regional_mdm_account: Whether or not the service is using regional MDM accounts. + :vartype enable_regional_mdm_account: str + :ivar source_mdm_account: The name of the MDM account. + :vartype source_mdm_account: str + :ivar source_mdm_namespace: The name of the MDM namespace. + :vartype source_mdm_namespace: str + :ivar availabilities: Defines how often data for metrics becomes available. + :vartype availabilities: list[~azure.mgmt.datafactory.models.OperationMetricAvailability] + :ivar dimensions: Defines the metric dimension. + :vartype dimensions: list[~azure.mgmt.datafactory.models.OperationMetricDimension] """ _attribute_map = { @@ -28788,6 +38483,29 @@ def __init__( dimensions: Optional[List["OperationMetricDimension"]] = None, **kwargs ): + """ + :keyword name: The name of the metric. + :paramtype name: str + :keyword display_name: Localized display name of the metric. + :paramtype display_name: str + :keyword display_description: The description of the metric. + :paramtype display_description: str + :keyword unit: The unit that the metric is measured in. + :paramtype unit: str + :keyword aggregation_type: The type of metric aggregation. + :paramtype aggregation_type: str + :keyword enable_regional_mdm_account: Whether or not the service is using regional MDM + accounts. + :paramtype enable_regional_mdm_account: str + :keyword source_mdm_account: The name of the MDM account. + :paramtype source_mdm_account: str + :keyword source_mdm_namespace: The name of the MDM namespace. + :paramtype source_mdm_namespace: str + :keyword availabilities: Defines how often data for metrics becomes available. + :paramtype availabilities: list[~azure.mgmt.datafactory.models.OperationMetricAvailability] + :keyword dimensions: Defines the metric dimension. 
+ :paramtype dimensions: list[~azure.mgmt.datafactory.models.OperationMetricDimension] + """ super(OperationMetricSpecification, self).__init__(**kwargs) self.name = name self.display_name = display_name @@ -28804,10 +38522,11 @@ def __init__( class OperationServiceSpecification(msrest.serialization.Model): """Details about a service operation. - :param log_specifications: Details about operations related to logs. - :type log_specifications: list[~azure.mgmt.datafactory.models.OperationLogSpecification] - :param metric_specifications: Details about operations related to metrics. - :type metric_specifications: list[~azure.mgmt.datafactory.models.OperationMetricSpecification] + :ivar log_specifications: Details about operations related to logs. + :vartype log_specifications: list[~azure.mgmt.datafactory.models.OperationLogSpecification] + :ivar metric_specifications: Details about operations related to metrics. + :vartype metric_specifications: + list[~azure.mgmt.datafactory.models.OperationMetricSpecification] """ _attribute_map = { @@ -28822,6 +38541,13 @@ def __init__( metric_specifications: Optional[List["OperationMetricSpecification"]] = None, **kwargs ): + """ + :keyword log_specifications: Details about operations related to logs. + :paramtype log_specifications: list[~azure.mgmt.datafactory.models.OperationLogSpecification] + :keyword metric_specifications: Details about operations related to metrics. + :paramtype metric_specifications: + list[~azure.mgmt.datafactory.models.OperationMetricSpecification] + """ super(OperationServiceSpecification, self).__init__(**kwargs) self.log_specifications = log_specifications self.metric_specifications = metric_specifications @@ -28832,34 +38558,34 @@ class OracleCloudStorageLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param access_key_id: The access key identifier of the Oracle Cloud Storage Identity and Access + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar access_key_id: The access key identifier of the Oracle Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: any - :param secret_access_key: The secret access key of the Oracle Cloud Storage Identity and Access + :vartype access_key_id: any + :ivar secret_access_key: The secret access key of the Oracle Cloud Storage Identity and Access Management (IAM) user. 
- :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_url: This value specifies the endpoint to access with the Oracle Cloud Storage + :vartype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_url: This value specifies the endpoint to access with the Oracle Cloud Storage Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :type service_url: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_url: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -28893,6 +38619,34 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword access_key_id: The access key identifier of the Oracle Cloud Storage Identity and + Access Management (IAM) user. Type: string (or Expression with resultType string). 
+ :paramtype access_key_id: any + :keyword secret_access_key: The secret access key of the Oracle Cloud Storage Identity and + Access Management (IAM) user. + :paramtype secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_url: This value specifies the endpoint to access with the Oracle Cloud Storage + Connector. This is an optional property; change it only if you want to try a different service + endpoint or want to switch between https and http. Type: string (or Expression with resultType + string). + :paramtype service_url: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(OracleCloudStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'OracleCloudStorage' # type: str self.access_key_id = access_key_id @@ -28906,23 +38660,23 @@ class OracleCloudStorageLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). 
- :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any - :param bucket_name: Specify the bucketName of Oracle Cloud Storage. Type: string (or Expression + :vartype file_name: any + :ivar bucket_name: Specify the bucketName of Oracle Cloud Storage. Type: string (or Expression with resultType string). - :type bucket_name: any - :param version: Specify the version of Oracle Cloud Storage. Type: string (or Expression with + :vartype bucket_name: any + :ivar version: Specify the version of Oracle Cloud Storage. Type: string (or Expression with resultType string). - :type version: any + :vartype version: any """ _validation = { @@ -28948,6 +38702,23 @@ def __init__( version: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + :keyword bucket_name: Specify the bucketName of Oracle Cloud Storage. Type: string (or + Expression with resultType string). + :paramtype bucket_name: any + :keyword version: Specify the version of Oracle Cloud Storage. Type: string (or Expression with + resultType string). 
+ :paramtype version: any + """ super(OracleCloudStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.type = 'OracleCloudStorageLocation' # type: str self.bucket_name = bucket_name @@ -28959,47 +38730,47 @@ class OracleCloudStorageReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: Oracle Cloud Storage wildcardFolderPath. 
Type: string (or + :vartype recursive: any + :ivar wildcard_folder_path: Oracle Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: Oracle Cloud Storage wildcardFileName. Type: string (or Expression + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: Oracle Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param prefix: The prefix filter for the Oracle Cloud Storage object name. Type: string (or + :vartype wildcard_file_name: any + :ivar prefix: The prefix filter for the Oracle Cloud Storage object name. Type: string (or Expression with resultType string). - :type prefix: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype prefix: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype file_list_path: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype partition_root_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any + :vartype modified_datetime_end: any """ _validation = { @@ -29041,6 +38812,47 @@ def __init__( modified_datetime_end: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: Oracle Cloud Storage wildcardFolderPath. Type: string (or + Expression with resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: Oracle Cloud Storage wildcardFileName. Type: string (or Expression + with resultType string). + :paramtype wildcard_file_name: any + :keyword prefix: The prefix filter for the Oracle Cloud Storage object name. 
Type: string (or + Expression with resultType string). + :paramtype prefix: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + """ super(OracleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OracleCloudStorageReadSettings' # type: str self.recursive = recursive @@ -29060,28 +38872,28 @@ class OracleLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. 
- :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -29114,6 +38926,28 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. + :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(OracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Oracle' # type: str self.connection_string = connection_string @@ -29124,19 +38958,19 @@ def __init__( class OraclePartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for Oracle source partitioning. 
- :param partition_names: Names of the physical partitions of Oracle table. - :type partition_names: any - :param partition_column_name: The name of the column in integer type that will be used for + :ivar partition_names: Names of the physical partitions of Oracle table. + :vartype partition_names: any + :ivar partition_column_name: The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: any - :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + :vartype partition_column_name: any + :ivar partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_upper_bound: any - :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + :vartype partition_upper_bound: any + :ivar partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_lower_bound: any + :vartype partition_lower_bound: any """ _attribute_map = { @@ -29155,6 +38989,21 @@ def __init__( partition_lower_bound: Optional[Any] = None, **kwargs ): + """ + :keyword partition_names: Names of the physical partitions of Oracle table. + :paramtype partition_names: any + :keyword partition_column_name: The name of the column in integer type that will be used for + proceeding range partitioning. Type: string (or Expression with resultType string). + :paramtype partition_column_name: any + :keyword partition_upper_bound: The maximum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). 
+ :paramtype partition_upper_bound: any + :keyword partition_lower_bound: The minimum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). + :paramtype partition_lower_bound: any + """ super(OraclePartitionSettings, self).__init__(**kwargs) self.partition_names = partition_names self.partition_column_name = partition_column_name @@ -29167,41 +39016,41 @@ class OracleServiceCloudLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The URL of the Oracle Service Cloud instance. - :type host: any - :param username: Required. The user name that you use to access Oracle Service Cloud server. - :type username: any - :param password: Required. The password corresponding to the user name that you provided in the + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. 
+ :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The URL of the Oracle Service Cloud instance. + :vartype host: any + :ivar username: Required. The user name that you use to access Oracle Service Cloud server. + :vartype username: any + :ivar password: Required. The password corresponding to the user name that you provided in the username key. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). 
- :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -29244,6 +39093,41 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The URL of the Oracle Service Cloud instance. + :paramtype host: any + :keyword username: Required. The user name that you use to access Oracle Service Cloud server. + :paramtype username: any + :keyword password: Required. The password corresponding to the user name that you provided in + the username key. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). 
+ :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. Type: boolean (or Expression with resultType boolean). + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. Type: boolean (or Expression with resultType + boolean). + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'OracleServiceCloud' # type: str self.host = host @@ -29260,30 +39144,30 @@ class OracleServiceCloudObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. 
+ :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). 
+ :vartype table_name: any """ _validation = { @@ -29318,6 +39202,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'OracleServiceCloudObject' # type: str self.table_name = table_name @@ -29328,32 +39236,32 @@ class OracleServiceCloudSource(TabularSource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -29385,6 +39293,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'OracleServiceCloudSource' # type: str self.query = query @@ -29395,32 +39329,32 @@ class OracleSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + :vartype disable_metrics_collection: any + :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: any + :vartype pre_copy_script: any """ _validation = { @@ -29452,6 +39386,32 @@ def __init__( pre_copy_script: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :paramtype pre_copy_script: any + """ super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OracleSink' # type: str self.pre_copy_script = pre_copy_script @@ -29462,37 +39422,37 @@ class OracleSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. 
Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType + :vartype disable_metrics_collection: any + :ivar oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType string). - :type oracle_reader_query: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype oracle_reader_query: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param partition_option: The partition mechanism that will be used for Oracle read in parallel. 
+ :vartype query_timeout: any + :ivar partition_option: The partition mechanism that will be used for Oracle read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for Oracle source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.OraclePartitionSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for Oracle source partitioning. + :vartype partition_settings: ~azure.mgmt.datafactory.models.OraclePartitionSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -29528,6 +39488,38 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype disable_metrics_collection: any + :keyword oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType + string). + :paramtype oracle_reader_query: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword partition_option: The partition mechanism that will be used for Oracle read in + parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for Oracle source + partitioning. + :paramtype partition_settings: ~azure.mgmt.datafactory.models.OraclePartitionSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OracleSource' # type: str self.oracle_reader_query = oracle_reader_query @@ -29542,37 +39534,37 @@ class OracleTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. 
Please consider using schema + table properties instead. - :type table_name: any - :param schema_type_properties_schema: The schema name of the on-premises Oracle database. Type: + :vartype table_name: any + :ivar schema_type_properties_schema: The schema name of the on-premises Oracle database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The table name of the on-premises Oracle database. Type: string (or Expression + :vartype schema_type_properties_schema: any + :ivar table: The table name of the on-premises Oracle database. Type: string (or Expression with resultType string). - :type table: any + :vartype table: any """ _validation = { @@ -29611,6 +39603,37 @@ def __init__( table: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword schema_type_properties_schema: The schema name of the on-premises Oracle database. + Type: string (or Expression with resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The table name of the on-premises Oracle database. Type: string (or Expression + with resultType string). + :paramtype table: any + """ super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'OracleTable' # type: str self.table_name = table_name @@ -29623,33 +39646,33 @@ class OrcDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the ORC data storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param orc_compression_codec: The data orcCompressionCodec. Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar location: The location of the ORC data storage. + :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation + :ivar orc_compression_codec: The data orcCompressionCodec. Type: string (or Expression with resultType string). 
- :type orc_compression_codec: any + :vartype orc_compression_codec: any """ _validation = { @@ -29686,6 +39709,33 @@ def __init__( orc_compression_codec: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword location: The location of the ORC data storage. + :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation + :keyword orc_compression_codec: The data orcCompressionCodec. Type: string (or Expression with + resultType string). 
+ :paramtype orc_compression_codec: any + """ super(OrcDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'Orc' # type: str self.location = location @@ -29697,15 +39747,15 @@ class OrcFormat(DatasetStorageFormat): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage format.Constant filled by server. - :type type: str - :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: any - :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: any + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage format.Constant filled by server. + :vartype type: str + :ivar serializer: Serializer. Type: string (or Expression with resultType string). + :vartype serializer: any + :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). + :vartype deserializer: any """ _validation = { @@ -29727,6 +39777,15 @@ def __init__( deserializer: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword serializer: Serializer. Type: string (or Expression with resultType string). + :paramtype serializer: any + :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). 
+ :paramtype deserializer: any + """ super(OrcFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) self.type = 'OrcFormat' # type: str @@ -29736,33 +39795,33 @@ class OrcSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: ORC store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: ORC format settings. - :type format_settings: ~azure.mgmt.datafactory.models.OrcWriteSettings + :vartype disable_metrics_collection: any + :ivar store_settings: ORC store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :ivar format_settings: ORC format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.OrcWriteSettings """ _validation = { @@ -29796,6 +39855,33 @@ def __init__( format_settings: Optional["OrcWriteSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: ORC store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :keyword format_settings: ORC format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.OrcWriteSettings + """ super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OrcSink' # type: str self.store_settings = store_settings @@ -29807,28 +39893,28 @@ class OrcSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. 
+ :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: ORC store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar store_settings: ORC store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -29858,6 +39944,28 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: ORC store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OrcSource' # type: str self.store_settings = store_settings @@ -29869,18 +39977,18 @@ class OrcWriteSettings(FormatWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. 
- :type type: str - :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :type max_rows_per_file: any - :param file_name_prefix: Specifies the file name pattern + :vartype max_rows_per_file: any + :ivar file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). - :type file_name_prefix: any + :vartype file_name_prefix: any """ _validation = { @@ -29902,6 +40010,18 @@ def __init__( file_name_prefix: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_rows_per_file: Limit the written file's row count to be smaller than or equal to + the specified count. Type: integer (or Expression with resultType integer). + :paramtype max_rows_per_file: any + :keyword file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :paramtype file_name_prefix: any + """ super(OrcWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'OrcWriteSettings' # type: str self.max_rows_per_file = max_rows_per_file @@ -29913,10 +40033,10 @@ class PackageStore(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param name: Required. The name of the package store. - :type name: str - :param package_store_linked_service: Required. 
The package store linked service reference. - :type package_store_linked_service: ~azure.mgmt.datafactory.models.EntityReference + :ivar name: Required. The name of the package store. + :vartype name: str + :ivar package_store_linked_service: Required. The package store linked service reference. + :vartype package_store_linked_service: ~azure.mgmt.datafactory.models.EntityReference """ _validation = { @@ -29936,6 +40056,12 @@ def __init__( package_store_linked_service: "EntityReference", **kwargs ): + """ + :keyword name: Required. The name of the package store. + :paramtype name: str + :keyword package_store_linked_service: Required. The package store linked service reference. + :paramtype package_store_linked_service: ~azure.mgmt.datafactory.models.EntityReference + """ super(PackageStore, self).__init__(**kwargs) self.name = name self.package_store_linked_service = package_store_linked_service @@ -29946,11 +40072,11 @@ class ParameterSpecification(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Parameter type. Possible values include: "Object", "String", "Int", + :ivar type: Required. Parameter type. Possible values include: "Object", "String", "Int", "Float", "Bool", "Array", "SecureString". - :type type: str or ~azure.mgmt.datafactory.models.ParameterType - :param default_value: Default value of parameter. - :type default_value: any + :vartype type: str or ~azure.mgmt.datafactory.models.ParameterType + :ivar default_value: Default value of parameter. + :vartype default_value: any """ _validation = { @@ -29969,6 +40095,13 @@ def __init__( default_value: Optional[Any] = None, **kwargs ): + """ + :keyword type: Required. Parameter type. Possible values include: "Object", "String", "Int", + "Float", "Bool", "Array", "SecureString". + :paramtype type: str or ~azure.mgmt.datafactory.models.ParameterType + :keyword default_value: Default value of parameter. 
+ :paramtype default_value: any + """ super(ParameterSpecification, self).__init__(**kwargs) self.type = type self.default_value = default_value @@ -29979,33 +40112,33 @@ class ParquetDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the parquet storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param compression_codec: The data compressionCodec. Type: string (or Expression with - resultType string). - :type compression_codec: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar location: The location of the parquet storage. + :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation + :ivar compression_codec: The data compressionCodec. Type: string (or Expression with resultType + string). + :vartype compression_codec: any """ _validation = { @@ -30042,6 +40175,33 @@ def __init__( compression_codec: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. 
Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword location: The location of the parquet storage. + :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation + :keyword compression_codec: The data compressionCodec. Type: string (or Expression with + resultType string). + :paramtype compression_codec: any + """ super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'Parquet' # type: str self.location = location @@ -30053,15 +40213,15 @@ class ParquetFormat(DatasetStorageFormat): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage format.Constant filled by server. - :type type: str - :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: any - :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: any + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage format.Constant filled by server. 
+ :vartype type: str + :ivar serializer: Serializer. Type: string (or Expression with resultType string). + :vartype serializer: any + :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). + :vartype deserializer: any """ _validation = { @@ -30083,6 +40243,15 @@ def __init__( deserializer: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword serializer: Serializer. Type: string (or Expression with resultType string). + :paramtype serializer: any + :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). + :paramtype deserializer: any + """ super(ParquetFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) self.type = 'ParquetFormat' # type: str @@ -30092,33 +40261,33 @@ class ParquetSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Parquet store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: Parquet format settings. - :type format_settings: ~azure.mgmt.datafactory.models.ParquetWriteSettings + :vartype disable_metrics_collection: any + :ivar store_settings: Parquet store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :ivar format_settings: Parquet format settings. 
+ :vartype format_settings: ~azure.mgmt.datafactory.models.ParquetWriteSettings """ _validation = { @@ -30152,6 +40321,33 @@ def __init__( format_settings: Optional["ParquetWriteSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: Parquet store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :keyword format_settings: Parquet format settings. 
+ :paramtype format_settings: ~azure.mgmt.datafactory.models.ParquetWriteSettings + """ super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ParquetSink' # type: str self.store_settings = store_settings @@ -30163,28 +40359,28 @@ class ParquetSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Parquet store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar store_settings: Parquet store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -30214,6 +40410,28 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: Parquet store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + """ super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ParquetSource' # type: str self.store_settings = store_settings @@ -30225,18 +40443,18 @@ class ParquetWriteSettings(FormatWriteSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :type max_rows_per_file: any - :param file_name_prefix: Specifies the file name pattern + :vartype max_rows_per_file: any + :ivar file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. 
Type: string (or Expression with resultType string). - :type file_name_prefix: any + :vartype file_name_prefix: any """ _validation = { @@ -30258,6 +40476,18 @@ def __init__( file_name_prefix: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_rows_per_file: Limit the written file's row count to be smaller than or equal to + the specified count. Type: integer (or Expression with resultType integer). + :paramtype max_rows_per_file: any + :keyword file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :paramtype file_name_prefix: any + """ super(ParquetWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'ParquetWriteSettings' # type: str self.max_rows_per_file = max_rows_per_file @@ -30269,39 +40499,39 @@ class PaypalLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). 
- :type host: any - :param client_id: Required. The client ID associated with your PayPal application. - :type client_id: any - :param client_secret: The client secret associated with your PayPal application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). + :vartype host: any + :ivar client_id: Required. The client ID associated with your PayPal application. + :vartype client_id: any + :ivar client_secret: The client secret associated with your PayPal application. + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
- :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -30343,6 +40573,39 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). + :paramtype host: any + :keyword client_id: Required. The client ID associated with your PayPal application. + :paramtype client_id: any + :keyword client_secret: The client secret associated with your PayPal application. 
+ :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(PaypalLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Paypal' # type: str self.host = host @@ -30359,30 +40622,30 @@ class PaypalObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. 
+ :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). 
+ :vartype table_name: any """ _validation = { @@ -30417,6 +40680,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(PaypalObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'PaypalObject' # type: str self.table_name = table_name @@ -30427,32 +40714,32 @@ class PaypalSource(TabularSource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -30484,6 +40771,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PaypalSource' # type: str self.query = query @@ -30494,57 +40807,57 @@ class PhoenixLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The IP address or host name of the Phoenix server. (i.e. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. 
+ :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The IP address or host name of the Phoenix server. (i.e. 192.168.222.160). - :type host: any - :param port: The TCP port that the Phoenix server uses to listen for client connections. The + :vartype host: any + :ivar port: The TCP port that the Phoenix server uses to listen for client connections. The default value is 8765. - :type port: any - :param http_path: The partial URL corresponding to the Phoenix server. (i.e. + :vartype port: any + :ivar http_path: The partial URL corresponding to the Phoenix server. (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using WindowsAzureHDInsightService. - :type http_path: any - :param authentication_type: Required. The authentication mechanism used to connect to the + :vartype http_path: any + :ivar authentication_type: Required. The authentication mechanism used to connect to the Phoenix server. Possible values include: "Anonymous", "UsernameAndPassword", "WindowsAzureHDInsightService". - :type authentication_type: str or ~azure.mgmt.datafactory.models.PhoenixAuthenticationType - :param username: The user name used to connect to the Phoenix server. - :type username: any - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.PhoenixAuthenticationType + :ivar username: The user name used to connect to the Phoenix server. 
+ :vartype username: any + :ivar password: The password corresponding to the user name. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :type enable_ssl: any - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + :vartype enable_ssl: any + :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: any - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + :vartype trusted_cert_path: any + :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: any - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + :vartype use_system_trust_store: any + :ivar allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: any - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + :vartype allow_host_name_cn_mismatch: any + :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype allow_self_signed_server_cert: any + :ivar encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -30596,6 +40909,57 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The IP address or host name of the Phoenix server. (i.e. + 192.168.222.160). + :paramtype host: any + :keyword port: The TCP port that the Phoenix server uses to listen for client connections. The + default value is 8765. + :paramtype port: any + :keyword http_path: The partial URL corresponding to the Phoenix server. (i.e. + /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using + WindowsAzureHDInsightService. + :paramtype http_path: any + :keyword authentication_type: Required. The authentication mechanism used to connect to the + Phoenix server. Possible values include: "Anonymous", "UsernameAndPassword", + "WindowsAzureHDInsightService". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.PhoenixAuthenticationType + :keyword username: The user name used to connect to the Phoenix server. + :paramtype username: any + :keyword password: The password corresponding to the user name. 
+ :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. + The default value is false. + :paramtype enable_ssl: any + :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates + for verifying the server when connecting over SSL. This property can only be set when using SSL + on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :paramtype trusted_cert_path: any + :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system + trust store or from a specified PEM file. The default value is false. + :paramtype use_system_trust_store: any + :keyword allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :paramtype allow_host_name_cn_mismatch: any + :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates + from the server. The default value is false. + :paramtype allow_self_signed_server_cert: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(PhoenixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Phoenix' # type: str self.host = host @@ -30617,37 +40981,37 @@ class PhoenixObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The table name of the Phoenix. Type: string (or Expression with resultType + :vartype table_name: any + :ivar table: The table name of the Phoenix. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The schema name of the Phoenix. Type: string (or + :vartype table: any + :ivar schema_type_properties_schema: The schema name of the Phoenix. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -30686,6 +41050,37 @@ def __init__( schema_type_properties_schema: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. 
+ :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of the Phoenix. Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of the Phoenix. Type: string (or + Expression with resultType string). + :paramtype schema_type_properties_schema: any + """ super(PhoenixObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'PhoenixObject' # type: str self.table_name = table_name @@ -30698,32 +41093,32 @@ class PhoenixSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. 
Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -30755,6 +41150,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). 
+ :paramtype query: any + """ super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PhoenixSource' # type: str self.query = query @@ -30763,8 +41184,8 @@ def __init__( class PipelineElapsedTimeMetricPolicy(msrest.serialization.Model): """Pipeline ElapsedTime Metric Policy. - :param duration: TimeSpan value, after which an Azure Monitoring Metric is fired. - :type duration: any + :ivar duration: TimeSpan value, after which an Azure Monitoring Metric is fired. + :vartype duration: any """ _attribute_map = { @@ -30777,6 +41198,10 @@ def __init__( duration: Optional[Any] = None, **kwargs ): + """ + :keyword duration: TimeSpan value, after which an Azure Monitoring Metric is fired. + :paramtype duration: any + """ super(PipelineElapsedTimeMetricPolicy, self).__init__(**kwargs) self.duration = duration @@ -30784,8 +41209,8 @@ def __init__( class PipelineFolder(msrest.serialization.Model): """The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. - :param name: The name of the folder that this Pipeline is in. - :type name: str + :ivar name: The name of the folder that this Pipeline is in. + :vartype name: str """ _attribute_map = { @@ -30798,6 +41223,10 @@ def __init__( name: Optional[str] = None, **kwargs ): + """ + :keyword name: The name of the folder that this Pipeline is in. + :paramtype name: str + """ super(PipelineFolder, self).__init__(**kwargs) self.name = name @@ -30807,10 +41236,10 @@ class PipelineListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of pipelines. 
- :type value: list[~azure.mgmt.datafactory.models.PipelineResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of pipelines. + :vartype value: list[~azure.mgmt.datafactory.models.PipelineResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -30829,6 +41258,12 @@ def __init__( next_link: Optional[str] = None, **kwargs ): + """ + :keyword value: Required. List of pipelines. + :paramtype value: list[~azure.mgmt.datafactory.models.PipelineResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(PipelineListResponse, self).__init__(**kwargs) self.value = value self.next_link = next_link @@ -30837,8 +41272,8 @@ def __init__( class PipelinePolicy(msrest.serialization.Model): """Pipeline Policy. - :param elapsed_time_metric: Pipeline ElapsedTime Metric Policy. - :type elapsed_time_metric: ~azure.mgmt.datafactory.models.PipelineElapsedTimeMetricPolicy + :ivar elapsed_time_metric: Pipeline ElapsedTime Metric Policy. + :vartype elapsed_time_metric: ~azure.mgmt.datafactory.models.PipelineElapsedTimeMetricPolicy """ _attribute_map = { @@ -30851,6 +41286,10 @@ def __init__( elapsed_time_metric: Optional["PipelineElapsedTimeMetricPolicy"] = None, **kwargs ): + """ + :keyword elapsed_time_metric: Pipeline ElapsedTime Metric Policy. + :paramtype elapsed_time_metric: ~azure.mgmt.datafactory.models.PipelineElapsedTimeMetricPolicy + """ super(PipelinePolicy, self).__init__(**kwargs) self.elapsed_time_metric = elapsed_time_metric @@ -30864,10 +41303,10 @@ class PipelineReference(msrest.serialization.Model): :ivar type: Pipeline reference type. Has constant value: "PipelineReference". :vartype type: str - :param reference_name: Required. Reference pipeline name. 
- :type reference_name: str - :param name: Reference name. - :type name: str + :ivar reference_name: Required. Reference pipeline name. + :vartype reference_name: str + :ivar name: Reference name. + :vartype name: str """ _validation = { @@ -30890,6 +41329,12 @@ def __init__( name: Optional[str] = None, **kwargs ): + """ + :keyword reference_name: Required. Reference pipeline name. + :paramtype reference_name: str + :keyword name: Reference name. + :paramtype name: str + """ super(PipelineReference, self).__init__(**kwargs) self.reference_name = reference_name self.name = name @@ -30908,28 +41353,28 @@ class PipelineResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param description: The description of the pipeline. - :type description: str - :param activities: List of activities in pipeline. - :type activities: list[~azure.mgmt.datafactory.models.Activity] - :param parameters: List of parameters for pipeline. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param variables: List of variables for pipeline. - :type variables: dict[str, ~azure.mgmt.datafactory.models.VariableSpecification] - :param concurrency: The max number of concurrent runs for the pipeline. - :type concurrency: int - :param annotations: List of tags that can be used for describing the Pipeline. - :type annotations: list[any] - :param run_dimensions: Dimensions emitted by Pipeline. - :type run_dimensions: dict[str, any] - :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar description: The description of the pipeline. 
+ :vartype description: str + :ivar activities: List of activities in pipeline. + :vartype activities: list[~azure.mgmt.datafactory.models.Activity] + :ivar parameters: List of parameters for pipeline. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar variables: List of variables for pipeline. + :vartype variables: dict[str, ~azure.mgmt.datafactory.models.VariableSpecification] + :ivar concurrency: The max number of concurrent runs for the pipeline. + :vartype concurrency: int + :ivar annotations: List of tags that can be used for describing the Pipeline. + :vartype annotations: list[any] + :ivar run_dimensions: Dimensions emitted by Pipeline. + :vartype run_dimensions: dict[str, any] + :ivar folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.PipelineFolder - :param policy: Pipeline Policy. - :type policy: ~azure.mgmt.datafactory.models.PipelinePolicy + :vartype folder: ~azure.mgmt.datafactory.models.PipelineFolder + :ivar policy: Pipeline Policy. + :vartype policy: ~azure.mgmt.datafactory.models.PipelinePolicy """ _validation = { @@ -30972,6 +41417,30 @@ def __init__( policy: Optional["PipelinePolicy"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: The description of the pipeline. + :paramtype description: str + :keyword activities: List of activities in pipeline. + :paramtype activities: list[~azure.mgmt.datafactory.models.Activity] + :keyword parameters: List of parameters for pipeline. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword variables: List of variables for pipeline. 
+ :paramtype variables: dict[str, ~azure.mgmt.datafactory.models.VariableSpecification] + :keyword concurrency: The max number of concurrent runs for the pipeline. + :paramtype concurrency: int + :keyword annotations: List of tags that can be used for describing the Pipeline. + :paramtype annotations: list[any] + :keyword run_dimensions: Dimensions emitted by Pipeline. + :paramtype run_dimensions: dict[str, any] + :keyword folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.PipelineFolder + :keyword policy: Pipeline Policy. + :paramtype policy: ~azure.mgmt.datafactory.models.PipelinePolicy + """ super(PipelineResource, self).__init__(**kwargs) self.additional_properties = additional_properties self.description = description @@ -30990,9 +41459,9 @@ class PipelineRun(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar run_id: Identifier of a run. :vartype run_id: str :ivar run_group_id: Identifier that correlates all the recovery runs of a pipeline run. @@ -31062,6 +41531,11 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + """ super(PipelineRun, self).__init__(**kwargs) self.additional_properties = additional_properties self.run_id = None @@ -31116,6 +41590,8 @@ def __init__( self, **kwargs ): + """ + """ super(PipelineRunInvokedBy, self).__init__(**kwargs) self.name = None self.id = None @@ -31129,11 +41605,11 @@ class PipelineRunsQueryResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of pipeline runs. - :type value: list[~azure.mgmt.datafactory.models.PipelineRun] - :param continuation_token: The continuation token for getting the next page of results, if any + :ivar value: Required. List of pipeline runs. + :vartype value: list[~azure.mgmt.datafactory.models.PipelineRun] + :ivar continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. - :type continuation_token: str + :vartype continuation_token: str """ _validation = { @@ -31152,6 +41628,13 @@ def __init__( continuation_token: Optional[str] = None, **kwargs ): + """ + :keyword value: Required. List of pipeline runs. + :paramtype value: list[~azure.mgmt.datafactory.models.PipelineRun] + :keyword continuation_token: The continuation token for getting the next page of results, if + any remaining results exist, null otherwise. + :paramtype continuation_token: str + """ super(PipelineRunsQueryResponse, self).__init__(**kwargs) self.value = value self.continuation_token = continuation_token @@ -31160,22 +41643,22 @@ def __init__( class PolybaseSettings(msrest.serialization.Model): """PolyBase settings. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param reject_type: Reject type. Possible values include: "value", "percentage". 
- :type reject_type: str or ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType - :param reject_value: Specifies the value or the percentage of rows that can be rejected before + :vartype additional_properties: dict[str, any] + :ivar reject_type: Reject type. Possible values include: "value", "percentage". + :vartype reject_type: str or ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType + :ivar reject_value: Specifies the value or the percentage of rows that can be rejected before the query fails. Type: number (or Expression with resultType number), minimum: 0. - :type reject_value: any - :param reject_sample_value: Determines the number of rows to attempt to retrieve before the + :vartype reject_value: any + :ivar reject_sample_value: Determines the number of rows to attempt to retrieve before the PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with resultType integer), minimum: 0. - :type reject_sample_value: any - :param use_type_default: Specifies how to handle missing values in delimited text files when + :vartype reject_sample_value: any + :ivar use_type_default: Specifies how to handle missing values in delimited text files when PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType boolean). - :type use_type_default: any + :vartype use_type_default: any """ _attribute_map = { @@ -31196,6 +41679,24 @@ def __init__( use_type_default: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword reject_type: Reject type. Possible values include: "value", "percentage". + :paramtype reject_type: str or ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType + :keyword reject_value: Specifies the value or the percentage of rows that can be rejected + before the query fails. 
Type: number (or Expression with resultType number), minimum: 0. + :paramtype reject_value: any + :keyword reject_sample_value: Determines the number of rows to attempt to retrieve before the + PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with + resultType integer), minimum: 0. + :paramtype reject_sample_value: any + :keyword use_type_default: Specifies how to handle missing values in delimited text files when + PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType + boolean). + :paramtype use_type_default: any + """ super(PolybaseSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.reject_type = reject_type @@ -31209,27 +41710,27 @@ class PostgreSqlLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. - :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -31262,6 +41763,27 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. + :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'PostgreSql' # type: str self.connection_string = connection_string @@ -31274,31 +41796,31 @@ class PostgreSqlSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any + :vartype additional_columns: any + :ivar query: Database query. Type: string (or Expression with resultType string). 
+ :vartype query: any """ _validation = { @@ -31330,6 +41852,31 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Type: string (or Expression with resultType string). 
+ :paramtype query: any + """ super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PostgreSqlSource' # type: str self.query = query @@ -31340,36 +41887,36 @@ class PostgreSqlTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The PostgreSQL table name. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The PostgreSQL schema name. Type: string (or Expression + :vartype table_name: any + :ivar table: The PostgreSQL table name. Type: string (or Expression with resultType string). + :vartype table: any + :ivar schema_type_properties_schema: The PostgreSQL schema name. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -31408,6 +41955,36 @@ def __init__( schema_type_properties_schema: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The PostgreSQL table name. Type: string (or Expression with resultType string). + :paramtype table: any + :keyword schema_type_properties_schema: The PostgreSQL schema name. Type: string (or Expression + with resultType string). 
+ :paramtype schema_type_properties_schema: any + """ super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'PostgreSqlTable' # type: str self.table_name = table_name @@ -31420,20 +41997,20 @@ class PowerQuerySink(DataFlowSink): All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param linked_service: Linked service reference. - :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param flowlet: Flowlet Reference. - :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference - :param schema_linked_service: Schema linked service reference. - :type schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param script: sink script. - :type script: str + :ivar name: Required. Transformation name. + :vartype name: str + :ivar description: Transformation description. + :vartype description: str + :ivar dataset: Dataset reference. + :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :ivar linked_service: Linked service reference. + :vartype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar flowlet: Flowlet Reference. + :vartype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + :ivar schema_linked_service: Schema linked service reference. + :vartype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar script: sink script. 
+ :vartype script: str """ _validation = { @@ -31462,6 +42039,22 @@ def __init__( script: Optional[str] = None, **kwargs ): + """ + :keyword name: Required. Transformation name. + :paramtype name: str + :keyword description: Transformation description. + :paramtype description: str + :keyword dataset: Dataset reference. + :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :keyword linked_service: Linked service reference. + :paramtype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword flowlet: Flowlet Reference. + :paramtype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + :keyword schema_linked_service: Schema linked service reference. + :paramtype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword script: sink script. + :paramtype script: str + """ super(PowerQuerySink, self).__init__(name=name, description=description, dataset=dataset, linked_service=linked_service, flowlet=flowlet, schema_linked_service=schema_linked_service, **kwargs) self.script = script @@ -31469,10 +42062,10 @@ def __init__( class PowerQuerySinkMapping(msrest.serialization.Model): """Map Power Query mashup query to sink dataset(s). - :param query_name: Name of the query in Power Query mashup document. - :type query_name: str - :param dataflow_sinks: List of sinks mapped to Power Query mashup query. - :type dataflow_sinks: list[~azure.mgmt.datafactory.models.PowerQuerySink] + :ivar query_name: Name of the query in Power Query mashup document. + :vartype query_name: str + :ivar dataflow_sinks: List of sinks mapped to Power Query mashup query. + :vartype dataflow_sinks: list[~azure.mgmt.datafactory.models.PowerQuerySink] """ _attribute_map = { @@ -31487,6 +42080,12 @@ def __init__( dataflow_sinks: Optional[List["PowerQuerySink"]] = None, **kwargs ): + """ + :keyword query_name: Name of the query in Power Query mashup document. 
+ :paramtype query_name: str + :keyword dataflow_sinks: List of sinks mapped to Power Query mashup query. + :paramtype dataflow_sinks: list[~azure.mgmt.datafactory.models.PowerQuerySink] + """ super(PowerQuerySinkMapping, self).__init__(**kwargs) self.query_name = query_name self.dataflow_sinks = dataflow_sinks @@ -31497,20 +42096,20 @@ class PowerQuerySource(DataFlowSource): All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param linked_service: Linked service reference. - :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param flowlet: Flowlet Reference. - :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference - :param schema_linked_service: Schema linked service reference. - :type schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param script: source script. - :type script: str + :ivar name: Required. Transformation name. + :vartype name: str + :ivar description: Transformation description. + :vartype description: str + :ivar dataset: Dataset reference. + :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :ivar linked_service: Linked service reference. + :vartype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar flowlet: Flowlet Reference. + :vartype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + :ivar schema_linked_service: Schema linked service reference. + :vartype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar script: source script. + :vartype script: str """ _validation = { @@ -31539,6 +42138,22 @@ def __init__( script: Optional[str] = None, **kwargs ): + """ + :keyword name: Required. Transformation name. 
+ :paramtype name: str + :keyword description: Transformation description. + :paramtype description: str + :keyword dataset: Dataset reference. + :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference + :keyword linked_service: Linked service reference. + :paramtype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword flowlet: Flowlet Reference. + :paramtype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference + :keyword schema_linked_service: Schema linked service reference. + :paramtype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword script: source script. + :paramtype script: str + """ super(PowerQuerySource, self).__init__(name=name, description=description, dataset=dataset, linked_service=linked_service, flowlet=flowlet, schema_linked_service=schema_linked_service, **kwargs) self.script = script @@ -31548,59 +42163,58 @@ class PrestoLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The IP address or host name of the Presto server. (i.e. - 192.168.222.160). - :type host: any - :param server_version: Required. 
The version of the Presto server. (i.e. 0.148-t). - :type server_version: any - :param catalog: Required. The catalog context for all request against the server. - :type catalog: any - :param port: The TCP port that the Presto server uses to listen for client connections. The + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The IP address or host name of the Presto server. (i.e. 192.168.222.160). + :vartype host: any + :ivar server_version: Required. The version of the Presto server. (i.e. 0.148-t). + :vartype server_version: any + :ivar catalog: Required. The catalog context for all request against the server. + :vartype catalog: any + :ivar port: The TCP port that the Presto server uses to listen for client connections. The default value is 8080. - :type port: any - :param authentication_type: Required. The authentication mechanism used to connect to the - Presto server. Possible values include: "Anonymous", "LDAP". - :type authentication_type: str or ~azure.mgmt.datafactory.models.PrestoAuthenticationType - :param username: The user name used to connect to the Presto server. - :type username: any - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + :vartype port: any + :ivar authentication_type: Required. 
The authentication mechanism used to connect to the Presto + server. Possible values include: "Anonymous", "LDAP". + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.PrestoAuthenticationType + :ivar username: The user name used to connect to the Presto server. + :vartype username: any + :ivar password: The password corresponding to the user name. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :type enable_ssl: any - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + :vartype enable_ssl: any + :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: any - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + :vartype trusted_cert_path: any + :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: any - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + :vartype use_system_trust_store: any + :ivar allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: any - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + :vartype allow_host_name_cn_mismatch: any + :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. 
- :type allow_self_signed_server_cert: any - :param time_zone_id: The local time zone used by the connection. Valid values for this option + :vartype allow_self_signed_server_cert: any + :ivar time_zone_id: The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone Database. The default value is the system time zone. - :type time_zone_id: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype time_zone_id: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -31658,6 +42272,59 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The IP address or host name of the Presto server. (i.e. + 192.168.222.160). + :paramtype host: any + :keyword server_version: Required. The version of the Presto server. (i.e. 0.148-t). + :paramtype server_version: any + :keyword catalog: Required. The catalog context for all request against the server. 
+ :paramtype catalog: any + :keyword port: The TCP port that the Presto server uses to listen for client connections. The + default value is 8080. + :paramtype port: any + :keyword authentication_type: Required. The authentication mechanism used to connect to the + Presto server. Possible values include: "Anonymous", "LDAP". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.PrestoAuthenticationType + :keyword username: The user name used to connect to the Presto server. + :paramtype username: any + :keyword password: The password corresponding to the user name. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. + The default value is false. + :paramtype enable_ssl: any + :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates + for verifying the server when connecting over SSL. This property can only be set when using SSL + on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :paramtype trusted_cert_path: any + :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system + trust store or from a specified PEM file. The default value is false. + :paramtype use_system_trust_store: any + :keyword allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :paramtype allow_host_name_cn_mismatch: any + :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates + from the server. The default value is false. + :paramtype allow_self_signed_server_cert: any + :keyword time_zone_id: The local time zone used by the connection. Valid values for this option + are specified in the IANA Time Zone Database. The default value is the system time zone. 
+ :paramtype time_zone_id: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(PrestoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Presto' # type: str self.host = host @@ -31681,37 +42348,36 @@ class PrestoObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The table name of the Presto. Type: string (or Expression with resultType - string). - :type table: any - :param schema_type_properties_schema: The schema name of the Presto. Type: string (or - Expression with resultType string). - :type schema_type_properties_schema: any + :vartype table_name: any + :ivar table: The table name of the Presto. Type: string (or Expression with resultType string). + :vartype table: any + :ivar schema_type_properties_schema: The schema name of the Presto. Type: string (or Expression + with resultType string). 
+ :vartype schema_type_properties_schema: any """ _validation = { @@ -31750,6 +42416,37 @@ def __init__( schema_type_properties_schema: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of the Presto. Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of the Presto. Type: string (or + Expression with resultType string). 
+ :paramtype schema_type_properties_schema: any + """ super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'PrestoObject' # type: str self.table_name = table_name @@ -31762,32 +42459,32 @@ class PrestoSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -31819,6 +42516,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PrestoSource' # type: str self.query = query @@ -31829,10 +42552,10 @@ class PrivateEndpointConnectionListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of Private Endpoint Connections. - :type value: list[~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of Private Endpoint Connections. + :vartype value: list[~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. 
+ :vartype next_link: str """ _validation = { @@ -31851,6 +42574,12 @@ def __init__( next_link: Optional[str] = None, **kwargs ): + """ + :keyword value: Required. List of Private Endpoint Connections. + :paramtype value: list[~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(PrivateEndpointConnectionListResponse, self).__init__(**kwargs) self.value = value self.next_link = next_link @@ -31869,8 +42598,8 @@ class PrivateEndpointConnectionResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Core resource properties. - :type properties: ~azure.mgmt.datafactory.models.RemotePrivateEndpointConnection + :ivar properties: Core resource properties. + :vartype properties: ~azure.mgmt.datafactory.models.RemotePrivateEndpointConnection """ _validation = { @@ -31894,6 +42623,10 @@ def __init__( properties: Optional["RemotePrivateEndpointConnection"] = None, **kwargs ): + """ + :keyword properties: Core resource properties. + :paramtype properties: ~azure.mgmt.datafactory.models.RemotePrivateEndpointConnection + """ super(PrivateEndpointConnectionResource, self).__init__(**kwargs) self.properties = properties @@ -31901,8 +42634,8 @@ def __init__( class PrivateLinkConnectionApprovalRequest(msrest.serialization.Model): """A request to approve or reject a private endpoint connection. - :param private_link_service_connection_state: The state of a private link connection. - :type private_link_service_connection_state: + :ivar private_link_service_connection_state: The state of a private link connection. 
+ :vartype private_link_service_connection_state: ~azure.mgmt.datafactory.models.PrivateLinkConnectionState """ @@ -31916,6 +42649,11 @@ def __init__( private_link_service_connection_state: Optional["PrivateLinkConnectionState"] = None, **kwargs ): + """ + :keyword private_link_service_connection_state: The state of a private link connection. + :paramtype private_link_service_connection_state: + ~azure.mgmt.datafactory.models.PrivateLinkConnectionState + """ super(PrivateLinkConnectionApprovalRequest, self).__init__(**kwargs) self.private_link_service_connection_state = private_link_service_connection_state @@ -31933,8 +42671,8 @@ class PrivateLinkConnectionApprovalRequestResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Core resource properties. - :type properties: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequest + :ivar properties: Core resource properties. + :vartype properties: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequest """ _validation = { @@ -31958,6 +42696,10 @@ def __init__( properties: Optional["PrivateLinkConnectionApprovalRequest"] = None, **kwargs ): + """ + :keyword properties: Core resource properties. + :paramtype properties: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequest + """ super(PrivateLinkConnectionApprovalRequestResource, self).__init__(**kwargs) self.properties = properties @@ -31965,12 +42707,12 @@ def __init__( class PrivateLinkConnectionState(msrest.serialization.Model): """The state of a private link connection. - :param status: Status of a private link connection. - :type status: str - :param description: Description of a private link connection. - :type description: str - :param actions_required: ActionsRequired for a private link connection. - :type actions_required: str + :ivar status: Status of a private link connection. 
+ :vartype status: str + :ivar description: Description of a private link connection. + :vartype description: str + :ivar actions_required: ActionsRequired for a private link connection. + :vartype actions_required: str """ _attribute_map = { @@ -31987,6 +42729,14 @@ def __init__( actions_required: Optional[str] = None, **kwargs ): + """ + :keyword status: Status of a private link connection. + :paramtype status: str + :keyword description: Description of a private link connection. + :paramtype description: str + :keyword actions_required: ActionsRequired for a private link connection. + :paramtype actions_required: str + """ super(PrivateLinkConnectionState, self).__init__(**kwargs) self.status = status self.description = description @@ -32006,8 +42756,8 @@ class PrivateLinkResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Core resource properties. - :type properties: ~azure.mgmt.datafactory.models.PrivateLinkResourceProperties + :ivar properties: Core resource properties. + :vartype properties: ~azure.mgmt.datafactory.models.PrivateLinkResourceProperties """ _validation = { @@ -32031,6 +42781,10 @@ def __init__( properties: Optional["PrivateLinkResourceProperties"] = None, **kwargs ): + """ + :keyword properties: Core resource properties. + :paramtype properties: ~azure.mgmt.datafactory.models.PrivateLinkResourceProperties + """ super(PrivateLinkResource, self).__init__(**kwargs) self.properties = properties @@ -32064,6 +42818,8 @@ def __init__( self, **kwargs ): + """ + """ super(PrivateLinkResourceProperties, self).__init__(**kwargs) self.group_id = None self.required_members = None @@ -32075,8 +42831,8 @@ class PrivateLinkResourcesWrapper(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. - :type value: list[~azure.mgmt.datafactory.models.PrivateLinkResource] + :ivar value: Required. 
+ :vartype value: list[~azure.mgmt.datafactory.models.PrivateLinkResource] """ _validation = { @@ -32093,6 +42849,10 @@ def __init__( value: List["PrivateLinkResource"], **kwargs ): + """ + :keyword value: Required. + :paramtype value: list[~azure.mgmt.datafactory.models.PrivateLinkResource] + """ super(PrivateLinkResourcesWrapper, self).__init__(**kwargs) self.value = value @@ -32100,10 +42860,10 @@ def __init__( class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model): """A list of active debug sessions. - :param value: Array with all active debug sessions. - :type value: list[~azure.mgmt.datafactory.models.DataFlowDebugSessionInfo] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Array with all active debug sessions. + :vartype value: list[~azure.mgmt.datafactory.models.DataFlowDebugSessionInfo] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _attribute_map = { @@ -32118,6 +42878,12 @@ def __init__( next_link: Optional[str] = None, **kwargs ): + """ + :keyword value: Array with all active debug sessions. + :paramtype value: list[~azure.mgmt.datafactory.models.DataFlowDebugSessionInfo] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(QueryDataFlowDebugSessionsResponse, self).__init__(**kwargs) self.value = value self.next_link = next_link @@ -32128,41 +42894,41 @@ class QuickBooksLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_properties: Properties used to connect to QuickBooks. It is mutually - exclusive with any other properties in the linked service. Type: object. - :type connection_properties: any - :param endpoint: The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com). - :type endpoint: any - :param company_id: The company ID of the QuickBooks company to authorize. - :type company_id: any - :param consumer_key: The consumer key for OAuth 1.0 authentication. - :type consumer_key: any - :param consumer_secret: The consumer secret for OAuth 1.0 authentication. - :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase - :param access_token: The access token for OAuth 1.0 authentication. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_token_secret: The access token secret for OAuth 1.0 authentication. - :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_properties: Properties used to connect to QuickBooks. It is mutually exclusive + with any other properties in the linked service. Type: object. + :vartype connection_properties: any + :ivar endpoint: The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com). + :vartype endpoint: any + :ivar company_id: The company ID of the QuickBooks company to authorize. + :vartype company_id: any + :ivar consumer_key: The consumer key for OAuth 1.0 authentication. + :vartype consumer_key: any + :ivar consumer_secret: The consumer secret for OAuth 1.0 authentication. + :vartype consumer_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar access_token: The access token for OAuth 1.0 authentication. + :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar access_token_secret: The access token secret for OAuth 1.0 authentication. + :vartype access_token_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_encrypted_endpoints: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -32206,6 +42972,41 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_properties: Properties used to connect to QuickBooks. It is mutually + exclusive with any other properties in the linked service. Type: object. + :paramtype connection_properties: any + :keyword endpoint: The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com). + :paramtype endpoint: any + :keyword company_id: The company ID of the QuickBooks company to authorize. + :paramtype company_id: any + :keyword consumer_key: The consumer key for OAuth 1.0 authentication. + :paramtype consumer_key: any + :keyword consumer_secret: The consumer secret for OAuth 1.0 authentication. + :paramtype consumer_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword access_token: The access token for OAuth 1.0 authentication. + :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword access_token_secret: The access token secret for OAuth 1.0 authentication. + :paramtype access_token_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(QuickBooksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'QuickBooks' # type: str self.connection_properties = connection_properties @@ -32224,30 +43025,30 @@ class QuickBooksObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -32282,6 +43083,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'QuickBooksObject' # type: str self.table_name = table_name @@ -32292,32 +43117,32 @@ class QuickBooksSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -32349,6 +43174,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'QuickBooksSource' # type: str self.query = query @@ -32357,19 +43208,19 @@ def __init__( class RecurrenceSchedule(msrest.serialization.Model): """The recurrence schedule. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param minutes: The minutes. - :type minutes: list[int] - :param hours: The hours. - :type hours: list[int] - :param week_days: The days of the week. - :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] - :param month_days: The month days. - :type month_days: list[int] - :param monthly_occurrences: The monthly occurrences. - :type monthly_occurrences: list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] + :vartype additional_properties: dict[str, any] + :ivar minutes: The minutes. + :vartype minutes: list[int] + :ivar hours: The hours. + :vartype hours: list[int] + :ivar week_days: The days of the week. + :vartype week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] + :ivar month_days: The month days. + :vartype month_days: list[int] + :ivar monthly_occurrences: The monthly occurrences. + :vartype monthly_occurrences: list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] """ _attribute_map = { @@ -32392,6 +43243,22 @@ def __init__( monthly_occurrences: Optional[List["RecurrenceScheduleOccurrence"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword minutes: The minutes. + :paramtype minutes: list[int] + :keyword hours: The hours. + :paramtype hours: list[int] + :keyword week_days: The days of the week. + :paramtype week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] + :keyword month_days: The month days. + :paramtype month_days: list[int] + :keyword monthly_occurrences: The monthly occurrences. 
+ :paramtype monthly_occurrences: + list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] + """ super(RecurrenceSchedule, self).__init__(**kwargs) self.additional_properties = additional_properties self.minutes = minutes @@ -32404,14 +43271,14 @@ def __init__( class RecurrenceScheduleOccurrence(msrest.serialization.Model): """The recurrence schedule occurrence. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param day: The day of the week. Possible values include: "Sunday", "Monday", "Tuesday", + :vartype additional_properties: dict[str, any] + :ivar day: The day of the week. Possible values include: "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday". - :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek - :param occurrence: The occurrence. - :type occurrence: int + :vartype day: str or ~azure.mgmt.datafactory.models.DayOfWeek + :ivar occurrence: The occurrence. + :vartype occurrence: int """ _attribute_map = { @@ -32428,6 +43295,16 @@ def __init__( occurrence: Optional[int] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword day: The day of the week. Possible values include: "Sunday", "Monday", "Tuesday", + "Wednesday", "Thursday", "Friday", "Saturday". + :paramtype day: str or ~azure.mgmt.datafactory.models.DayOfWeek + :keyword occurrence: The occurrence. + :paramtype occurrence: int + """ super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) self.additional_properties = additional_properties self.day = day @@ -32439,17 +43316,17 @@ class RedirectIncompatibleRowSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param linked_service_name: Required. Name of the Azure Storage, Storage SAS, or Azure Data - Lake Store linked service used for redirecting incompatible row. Must be specified if + :vartype additional_properties: dict[str, any] + :ivar linked_service_name: Required. Name of the Azure Storage, Storage SAS, or Azure Data Lake + Store linked service used for redirecting incompatible row. Must be specified if redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType string). - :type linked_service_name: any - :param path: The path for storing the redirect incompatible row data. Type: string (or + :vartype linked_service_name: any + :ivar path: The path for storing the redirect incompatible row data. Type: string (or Expression with resultType string). - :type path: any + :vartype path: any """ _validation = { @@ -32470,6 +43347,19 @@ def __init__( path: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword linked_service_name: Required. Name of the Azure Storage, Storage SAS, or Azure Data + Lake Store linked service used for redirecting incompatible row. Must be specified if + redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType + string). + :paramtype linked_service_name: any + :keyword path: The path for storing the redirect incompatible row data. Type: string (or + Expression with resultType string). 
+ :paramtype path: any + """ super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.linked_service_name = linked_service_name @@ -32481,13 +43371,13 @@ class RedshiftUnloadSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param s3_linked_service_name: Required. The name of the Amazon S3 linked service which will be + :ivar s3_linked_service_name: Required. The name of the Amazon S3 linked service which will be used for the unload operation when copying from the Amazon Redshift source. - :type s3_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param bucket_name: Required. The bucket of the interim Amazon S3 which will be used to store + :vartype s3_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar bucket_name: Required. The bucket of the interim Amazon S3 which will be used to store the unloaded data from Amazon Redshift source. The bucket must be in the same region as the Amazon Redshift source. Type: string (or Expression with resultType string). - :type bucket_name: any + :vartype bucket_name: any """ _validation = { @@ -32507,6 +43397,15 @@ def __init__( bucket_name: Any, **kwargs ): + """ + :keyword s3_linked_service_name: Required. The name of the Amazon S3 linked service which will + be used for the unload operation when copying from the Amazon Redshift source. + :paramtype s3_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword bucket_name: Required. The bucket of the interim Amazon S3 which will be used to store + the unloaded data from Amazon Redshift source. The bucket must be in the same region as the + Amazon Redshift source. Type: string (or Expression with resultType string). 
+ :paramtype bucket_name: any + """ super(RedshiftUnloadSettings, self).__init__(**kwargs) self.s3_linked_service_name = s3_linked_service_name self.bucket_name = bucket_name @@ -32517,28 +43416,28 @@ class RelationalSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type disable_metrics_collection: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar query: Database query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -32568,6 +43467,28 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :paramtype additional_columns: any + """ super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'RelationalSource' # type: str self.query = query @@ -32579,31 +43500,31 @@ class RelationalTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The relational table name. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The relational table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype table_name: any """ _validation = { @@ -32638,6 +43559,31 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The relational table name. Type: string (or Expression with resultType + string). + :paramtype table_name: any + """ super(RelationalTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'RelationalTable' # type: str self.table_name = table_name @@ -32650,10 +43596,10 @@ class RemotePrivateEndpointConnection(msrest.serialization.Model): :ivar provisioning_state: :vartype provisioning_state: str - :param private_endpoint: PrivateEndpoint of a remote private endpoint connection. - :type private_endpoint: ~azure.mgmt.datafactory.models.ArmIdWrapper - :param private_link_service_connection_state: The state of a private link connection. - :type private_link_service_connection_state: + :ivar private_endpoint: PrivateEndpoint of a remote private endpoint connection. + :vartype private_endpoint: ~azure.mgmt.datafactory.models.ArmIdWrapper + :ivar private_link_service_connection_state: The state of a private link connection. 
+ :vartype private_link_service_connection_state: ~azure.mgmt.datafactory.models.PrivateLinkConnectionState """ @@ -32674,6 +43620,13 @@ def __init__( private_link_service_connection_state: Optional["PrivateLinkConnectionState"] = None, **kwargs ): + """ + :keyword private_endpoint: PrivateEndpoint of a remote private endpoint connection. + :paramtype private_endpoint: ~azure.mgmt.datafactory.models.ArmIdWrapper + :keyword private_link_service_connection_state: The state of a private link connection. + :paramtype private_link_service_connection_state: + ~azure.mgmt.datafactory.models.PrivateLinkConnectionState + """ super(RemotePrivateEndpointConnection, self).__init__(**kwargs) self.provisioning_state = None self.private_endpoint = private_endpoint @@ -32687,29 +43640,29 @@ class RerunTumblingWindowTrigger(Trigger): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Trigger type.Constant filled by server. + :vartype type: str + :ivar description: Trigger description. + :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[any] - :param parent_trigger: Required. The parent trigger reference. 
- :type parent_trigger: any - :param requested_start_time: Required. The start time for the time period for which restatement + :ivar annotations: List of tags that can be used for describing the trigger. + :vartype annotations: list[any] + :ivar parent_trigger: Required. The parent trigger reference. + :vartype parent_trigger: any + :ivar requested_start_time: Required. The start time for the time period for which restatement is initiated. Only UTC time is currently supported. - :type requested_start_time: ~datetime.datetime - :param requested_end_time: Required. The end time for the time period for which restatement is + :vartype requested_start_time: ~datetime.datetime + :ivar requested_end_time: Required. The end time for the time period for which restatement is initiated. Only UTC time is currently supported. - :type requested_end_time: ~datetime.datetime - :param rerun_concurrency: Required. The max number of parallel time windows (ready for + :vartype requested_end_time: ~datetime.datetime + :ivar rerun_concurrency: Required. The max number of parallel time windows (ready for execution) for which a rerun is triggered. - :type rerun_concurrency: int + :vartype rerun_concurrency: int """ _validation = { @@ -32745,6 +43698,26 @@ def __init__( annotations: Optional[List[Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Trigger description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the trigger. + :paramtype annotations: list[any] + :keyword parent_trigger: Required. The parent trigger reference. + :paramtype parent_trigger: any + :keyword requested_start_time: Required. The start time for the time period for which + restatement is initiated. Only UTC time is currently supported. 
+ :paramtype requested_start_time: ~datetime.datetime + :keyword requested_end_time: Required. The end time for the time period for which restatement + is initiated. Only UTC time is currently supported. + :paramtype requested_end_time: ~datetime.datetime + :keyword rerun_concurrency: Required. The max number of parallel time windows (ready for + execution) for which a rerun is triggered. + :paramtype rerun_concurrency: int + """ super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.type = 'RerunTumblingWindowTrigger' # type: str self.parent_trigger = parent_trigger @@ -32758,42 +43731,42 @@ class ResponsysLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param endpoint: Required. The endpoint of the Responsys server. - :type endpoint: any - :param client_id: Required. The client ID associated with the Responsys application. Type: + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. 
+ :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar endpoint: Required. The endpoint of the Responsys server. + :vartype endpoint: any + :ivar client_id: Required. The client ID associated with the Responsys application. Type: string (or Expression with resultType string). - :type client_id: any - :param client_secret: The client secret associated with the Responsys application. Type: string + :vartype client_id: any + :ivar client_secret: The client secret associated with the Responsys application. Type: string (or Expression with resultType string). - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). 
- :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -32835,6 +43808,42 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword endpoint: Required. The endpoint of the Responsys server. + :paramtype endpoint: any + :keyword client_id: Required. The client ID associated with the Responsys application. Type: + string (or Expression with resultType string). + :paramtype client_id: any + :keyword client_secret: The client secret associated with the Responsys application. Type: + string (or Expression with resultType string). 
+ :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. Type: boolean (or Expression with resultType boolean). + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. Type: boolean (or Expression with resultType + boolean). + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(ResponsysLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Responsys' # type: str self.endpoint = endpoint @@ -32851,30 +43860,30 @@ class ResponsysObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -32909,6 +43918,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). 
+ :paramtype table_name: any + """ super(ResponsysObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'ResponsysObject' # type: str self.table_name = table_name @@ -32919,32 +43952,32 @@ class ResponsysSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -32976,6 +44009,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ResponsysSource' # type: str self.query = query @@ -32986,43 +44045,43 @@ class RestResourceDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. 
+ :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param relative_url: The relative URL to the resource that the RESTful API provides. Type: + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar relative_url: The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType string). 
- :type relative_url: any - :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: + :vartype relative_url: any + :ivar request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). - :type request_method: any - :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + :vartype request_method: any + :ivar request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). - :type request_body: any - :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + :vartype request_body: any + :ivar additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: any - :param pagination_rules: The pagination rules to compose next page requests. Type: string (or + :vartype additional_headers: any + :ivar pagination_rules: The pagination rules to compose next page requests. Type: string (or Expression with resultType string). - :type pagination_rules: any + :vartype pagination_rules: any """ _validation = { @@ -33065,6 +44124,43 @@ def __init__( pagination_rules: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. 
+ :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword relative_url: The relative URL to the resource that the RESTful API provides. Type: + string (or Expression with resultType string). + :paramtype relative_url: any + :keyword request_method: The HTTP method used to call the RESTful API. The default is GET. + Type: string (or Expression with resultType string). + :paramtype request_method: any + :keyword request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + string (or Expression with resultType string). + :paramtype request_body: any + :keyword additional_headers: The additional HTTP headers in the request to the RESTful API. + Type: string (or Expression with resultType string). + :paramtype additional_headers: any + :keyword pagination_rules: The pagination rules to compose next page requests. Type: string (or + Expression with resultType string). + :paramtype pagination_rules: any + """ super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'RestResource' # type: str self.relative_url = relative_url @@ -33079,57 +44175,58 @@ class RestServiceLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param url: Required. The base URL of the REST service. - :type url: any - :param enable_server_certificate_validation: Whether to validate server side SSL certificate + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar url: Required. The base URL of the REST service. + :vartype url: any + :ivar enable_server_certificate_validation: Whether to validate server side SSL certificate when connecting to the endpoint.The default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: any - :param authentication_type: Required. 
Type of authentication used to connect to the REST + :vartype enable_server_certificate_validation: any + :ivar authentication_type: Required. Type of authentication used to connect to the REST service. Possible values include: "Anonymous", "Basic", "AadServicePrincipal", "ManagedServiceIdentity". - :type authentication_type: str or ~azure.mgmt.datafactory.models.RestServiceAuthenticationType - :param user_name: The user name used in Basic authentication type. - :type user_name: any - :param password: The password used in Basic authentication type. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param auth_headers: The additional HTTP headers in the request to RESTful API used for + :vartype authentication_type: str or + ~azure.mgmt.datafactory.models.RestServiceAuthenticationType + :ivar user_name: The user name used in Basic authentication type. + :vartype user_name: any + :ivar password: The password used in Basic authentication type. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). - :type auth_headers: any - :param service_principal_id: The application's client ID used in AadServicePrincipal + :vartype auth_headers: any + :ivar service_principal_id: The application's client ID used in AadServicePrincipal authentication type. - :type service_principal_id: any - :param service_principal_key: The application's key used in AadServicePrincipal authentication + :vartype service_principal_id: any + :ivar service_principal_key: The application's key used in AadServicePrincipal authentication type. 
- :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. - :type tenant: any - :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + :vartype tenant: any + :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: any - :param aad_resource_id: The resource you are requesting authorization to use. - :type aad_resource_id: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype azure_cloud_type: any + :ivar aad_resource_id: The resource you are requesting authorization to use. + :vartype aad_resource_id: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype encrypted_credential: any + :ivar credential: The credential reference containing authentication information. 
+ :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -33183,6 +44280,58 @@ def __init__( credential: Optional["CredentialReference"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword url: Required. The base URL of the REST service. + :paramtype url: any + :keyword enable_server_certificate_validation: Whether to validate server side SSL certificate + when connecting to the endpoint.The default value is true. Type: boolean (or Expression with + resultType boolean). + :paramtype enable_server_certificate_validation: any + :keyword authentication_type: Required. Type of authentication used to connect to the REST + service. Possible values include: "Anonymous", "Basic", "AadServicePrincipal", + "ManagedServiceIdentity". + :paramtype authentication_type: str or + ~azure.mgmt.datafactory.models.RestServiceAuthenticationType + :keyword user_name: The user name used in Basic authentication type. + :paramtype user_name: any + :keyword password: The password used in Basic authentication type. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword auth_headers: The additional HTTP headers in the request to RESTful API used for + authorization. Type: object (or Expression with resultType object). 
+ :paramtype auth_headers: any + :keyword service_principal_id: The application's client ID used in AadServicePrincipal + authentication type. + :paramtype service_principal_id: any + :keyword service_principal_key: The application's key used in AadServicePrincipal + authentication type. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal + authentication type under which your application resides. + :paramtype tenant: any + :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. + Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is + the data factory regions’ cloud type. Type: string (or Expression with resultType string). + :paramtype azure_cloud_type: any + :keyword aad_resource_id: The resource you are requesting authorization to use. + :paramtype aad_resource_id: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'RestService' # type: str self.url = url @@ -33205,45 +44354,45 @@ class RestSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param request_method: The HTTP method used to call the RESTful API. The default is POST. Type: + :vartype disable_metrics_collection: any + :ivar request_method: The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). - :type request_method: any - :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + :vartype request_method: any + :ivar additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: any - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + :vartype additional_headers: any + :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: any - :param request_interval: The time to await before sending next request, in milliseconds. - :type request_interval: any - :param http_compression_type: Http Compression Type to Send data in compressed format with + :vartype http_request_timeout: any + :ivar request_interval: The time to await before sending next request, in milliseconds. + :vartype request_interval: any + :ivar http_compression_type: Http Compression Type to Send data in compressed format with Optimal Compression Level, Default is None. And The Only Supported option is Gzip. 
- :type http_compression_type: any + :vartype http_compression_type: any """ _validation = { @@ -33283,6 +44432,45 @@ def __init__( http_compression_type: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword request_method: The HTTP method used to call the RESTful API. The default is POST. + Type: string (or Expression with resultType string). + :paramtype request_method: any + :keyword additional_headers: The additional HTTP headers in the request to the RESTful API. + Type: string (or Expression with resultType string). + :paramtype additional_headers: any + :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the + timeout to get a response, not the timeout to read response data. 
Default value: 00:01:40. + Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype http_request_timeout: any + :keyword request_interval: The time to await before sending next request, in milliseconds. + :paramtype request_interval: any + :keyword http_compression_type: Http Compression Type to Send data in compressed format with + Optimal Compression Level, Default is None. And The Only Supported option is Gzip. + :paramtype http_compression_type: any + """ super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'RestSink' # type: str self.request_method = request_method @@ -33297,45 +44485,45 @@ class RestSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: + :vartype disable_metrics_collection: any + :ivar request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). - :type request_method: any - :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + :vartype request_method: any + :ivar request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). - :type request_body: any - :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + :vartype request_body: any + :ivar additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: any - :param pagination_rules: The pagination rules to compose next page requests. Type: string (or + :vartype additional_headers: any + :ivar pagination_rules: The pagination rules to compose next page requests. Type: string (or Expression with resultType string). 
- :type pagination_rules: any - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + :vartype pagination_rules: any + :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: any - :param request_interval: The time to await before sending next page request. - :type request_interval: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype http_request_timeout: any + :ivar request_interval: The time to await before sending next page request. + :vartype request_interval: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -33375,6 +44563,45 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword request_method: The HTTP method used to call the RESTful API. The default is GET. + Type: string (or Expression with resultType string). + :paramtype request_method: any + :keyword request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + string (or Expression with resultType string). + :paramtype request_body: any + :keyword additional_headers: The additional HTTP headers in the request to the RESTful API. + Type: string (or Expression with resultType string). + :paramtype additional_headers: any + :keyword pagination_rules: The pagination rules to compose next page requests. Type: string (or + Expression with resultType string). + :paramtype pagination_rules: any + :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the + timeout to get a response, not the timeout to read response data. Default value: 00:01:40. + Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype http_request_timeout: any + :keyword request_interval: The time to await before sending next page request. + :paramtype request_interval: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :paramtype additional_columns: any + """ super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'RestSource' # type: str self.request_method = request_method @@ -33389,11 +44616,11 @@ def __init__( class RetryPolicy(msrest.serialization.Model): """Execution policy for an activity. - :param count: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with + :ivar count: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. - :type count: any - :param interval_in_seconds: Interval between retries in seconds. Default is 30. - :type interval_in_seconds: int + :vartype count: any + :ivar interval_in_seconds: Interval between retries in seconds. Default is 30. + :vartype interval_in_seconds: int """ _validation = { @@ -33412,6 +44639,13 @@ def __init__( interval_in_seconds: Optional[int] = None, **kwargs ): + """ + :keyword count: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression + with resultType integer), minimum: 0. + :paramtype count: any + :keyword interval_in_seconds: Interval between retries in seconds. Default is 30. + :paramtype interval_in_seconds: int + """ super(RetryPolicy, self).__init__(**kwargs) self.count = count self.interval_in_seconds = interval_in_seconds @@ -33422,19 +44656,19 @@ class RunFilterParameters(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param continuation_token: The continuation token for getting the next page of results. Null - for first page. - :type continuation_token: str - :param last_updated_after: Required. 
The time at or after which the run event was updated in + :ivar continuation_token: The continuation token for getting the next page of results. Null for + first page. + :vartype continuation_token: str + :ivar last_updated_after: Required. The time at or after which the run event was updated in 'ISO 8601' format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: Required. The time at or before which the run event was updated in + :vartype last_updated_after: ~datetime.datetime + :ivar last_updated_before: Required. The time at or before which the run event was updated in 'ISO 8601' format. - :type last_updated_before: ~datetime.datetime - :param filters: List of filters. - :type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] - :param order_by: List of OrderBy option. - :type order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] + :vartype last_updated_before: ~datetime.datetime + :ivar filters: List of filters. + :vartype filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] + :ivar order_by: List of OrderBy option. + :vartype order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] """ _validation = { @@ -33460,6 +44694,21 @@ def __init__( order_by: Optional[List["RunQueryOrderBy"]] = None, **kwargs ): + """ + :keyword continuation_token: The continuation token for getting the next page of results. Null + for first page. + :paramtype continuation_token: str + :keyword last_updated_after: Required. The time at or after which the run event was updated in + 'ISO 8601' format. + :paramtype last_updated_after: ~datetime.datetime + :keyword last_updated_before: Required. The time at or before which the run event was updated + in 'ISO 8601' format. + :paramtype last_updated_before: ~datetime.datetime + :keyword filters: List of filters. + :paramtype filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] + :keyword order_by: List of OrderBy option. 
+ :paramtype order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] + """ super(RunFilterParameters, self).__init__(**kwargs) self.continuation_token = continuation_token self.last_updated_after = last_updated_after @@ -33473,18 +44722,18 @@ class RunQueryFilter(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param operand: Required. Parameter name to be used for filter. The allowed operands to query + :ivar operand: Required. Parameter name to be used for filter. The allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger runs are TriggerName, TriggerRunTimestamp and Status. Possible values include: "PipelineName", "Status", "RunStart", "RunEnd", "ActivityName", "ActivityRunStart", "ActivityRunEnd", "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", "LatestOnly". - :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand - :param operator: Required. Operator to be used for filter. Possible values include: "Equals", + :vartype operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand + :ivar operator: Required. Operator to be used for filter. Possible values include: "Equals", "NotEquals", "In", "NotIn". - :type operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator - :param values: Required. List of filter values. - :type values: list[str] + :vartype operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator + :ivar values: Required. List of filter values. + :vartype values: list[str] """ _validation = { @@ -33507,6 +44756,20 @@ def __init__( values: List[str], **kwargs ): + """ + :keyword operand: Required. Parameter name to be used for filter. 
The allowed operands to query + pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are + ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger + runs are TriggerName, TriggerRunTimestamp and Status. Possible values include: "PipelineName", + "Status", "RunStart", "RunEnd", "ActivityName", "ActivityRunStart", "ActivityRunEnd", + "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", "LatestOnly". + :paramtype operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand + :keyword operator: Required. Operator to be used for filter. Possible values include: "Equals", + "NotEquals", "In", "NotIn". + :paramtype operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator + :keyword values: Required. List of filter values. + :paramtype values: list[str] + """ super(RunQueryFilter, self).__init__(**kwargs) self.operand = operand self.operator = operator @@ -33518,15 +44781,15 @@ class RunQueryOrderBy(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param order_by: Required. Parameter name to be used for order by. The allowed parameters to + :ivar order_by: Required. Parameter name to be used for order by. The allowed parameters to order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, TriggerRunTimestamp and Status. Possible values include: "RunStart", "RunEnd", "PipelineName", "Status", "ActivityName", "ActivityRunStart", "ActivityRunEnd", "TriggerName", "TriggerRunTimestamp". - :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField - :param order: Required. Sorting order of the parameter. Possible values include: "ASC", "DESC". 
- :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder + :vartype order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField + :ivar order: Required. Sorting order of the parameter. Possible values include: "ASC", "DESC". + :vartype order: str or ~azure.mgmt.datafactory.models.RunQueryOrder """ _validation = { @@ -33546,6 +44809,18 @@ def __init__( order: Union[str, "RunQueryOrder"], **kwargs ): + """ + :keyword order_by: Required. Parameter name to be used for order by. The allowed parameters to + order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are + ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, + TriggerRunTimestamp and Status. Possible values include: "RunStart", "RunEnd", "PipelineName", + "Status", "ActivityName", "ActivityRunStart", "ActivityRunEnd", "TriggerName", + "TriggerRunTimestamp". + :paramtype order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField + :keyword order: Required. Sorting order of the parameter. Possible values include: "ASC", + "DESC". + :paramtype order: str or ~azure.mgmt.datafactory.models.RunQueryOrder + """ super(RunQueryOrderBy, self).__init__(**kwargs) self.order_by = order_by self.order = order @@ -33556,38 +44831,38 @@ class SalesforceLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param environment_url: The URL of Salesforce instance. Default is + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar environment_url: The URL of Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). - :type environment_url: any - :param username: The username for Basic authentication of the Salesforce instance. Type: string + :vartype environment_url: any + :ivar username: The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). - :type username: any - :param password: The password for Basic authentication of the Salesforce instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is optional to remotely access Salesforce instance. - :type security_token: ~azure.mgmt.datafactory.models.SecretBase - :param api_version: The Salesforce API version used in ADF. 
Type: string (or Expression with + :vartype username: any + :ivar password: The password for Basic authentication of the Salesforce instance. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar security_token: The security token is optional to remotely access Salesforce instance. + :vartype security_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar api_version: The Salesforce API version used in ADF. Type: string (or Expression with resultType string). - :type api_version: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype api_version: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -33625,6 +44900,38 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword environment_url: The URL of Salesforce instance. Default is + 'https://login.salesforce.com'. To copy data from sandbox, specify + 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, + 'https://[domain].my.salesforce.com'. 
Type: string (or Expression with resultType string). + :paramtype environment_url: any + :keyword username: The username for Basic authentication of the Salesforce instance. Type: + string (or Expression with resultType string). + :paramtype username: any + :keyword password: The password for Basic authentication of the Salesforce instance. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword security_token: The security token is optional to remotely access Salesforce instance. + :paramtype security_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword api_version: The Salesforce API version used in ADF. Type: string (or Expression with + resultType string). + :paramtype api_version: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Salesforce' # type: str self.environment_url = environment_url @@ -33640,43 +44947,43 @@ class SalesforceMarketingCloudLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_properties: Properties used to connect to Salesforce Marketing Cloud. It is + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_properties: Properties used to connect to Salesforce Marketing Cloud. It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: any - :param client_id: The client ID associated with the Salesforce Marketing Cloud application. + :vartype connection_properties: any + :ivar client_id: The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). - :type client_id: any - :param client_secret: The client secret associated with the Salesforce Marketing Cloud + :vartype client_id: any + :ivar client_secret: The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). 
- :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -33716,6 +45023,43 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_properties: Properties used to connect to Salesforce Marketing Cloud. It is + mutually exclusive with any other properties in the linked service. Type: object. + :paramtype connection_properties: any + :keyword client_id: The client ID associated with the Salesforce Marketing Cloud application. + Type: string (or Expression with resultType string). + :paramtype client_id: any + :keyword client_secret: The client secret associated with the Salesforce Marketing Cloud + application. Type: string (or Expression with resultType string). + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. Type: boolean (or Expression with resultType boolean). + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. Type: boolean (or Expression with resultType + boolean). + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. 
Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(SalesforceMarketingCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'SalesforceMarketingCloud' # type: str self.connection_properties = connection_properties @@ -33732,30 +45076,30 @@ class SalesforceMarketingCloudObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -33790,6 +45134,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. 
+ :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(SalesforceMarketingCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'SalesforceMarketingCloudObject' # type: str self.table_name = table_name @@ -33800,32 +45168,32 @@ class SalesforceMarketingCloudSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: any + :vartype query: any """ _validation = { @@ -33857,6 +45225,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). 
+ :paramtype query: any + """ super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SalesforceMarketingCloudSource' # type: str self.query = query @@ -33867,31 +45261,31 @@ class SalesforceObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param object_api_name: The Salesforce object API name. Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar object_api_name: The Salesforce object API name. Type: string (or Expression with resultType string). - :type object_api_name: any + :vartype object_api_name: any """ _validation = { @@ -33926,6 +45320,31 @@ def __init__( object_api_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. 
Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword object_api_name: The Salesforce object API name. Type: string (or Expression with + resultType string). + :paramtype object_api_name: any + """ super(SalesforceObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'SalesforceObject' # type: str self.object_api_name = object_api_name @@ -33936,41 +45355,41 @@ class SalesforceServiceCloudLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. 
- :type annotations: list[any] - :param environment_url: The URL of Salesforce Service Cloud instance. Default is + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar environment_url: The URL of Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). - :type environment_url: any - :param username: The username for Basic authentication of the Salesforce instance. Type: string + :vartype environment_url: any + :ivar username: The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). - :type username: any - :param password: The password for Basic authentication of the Salesforce instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is optional to remotely access Salesforce instance. - :type security_token: ~azure.mgmt.datafactory.models.SecretBase - :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with + :vartype username: any + :ivar password: The password for Basic authentication of the Salesforce instance. 
+ :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar security_token: The security token is optional to remotely access Salesforce instance. + :vartype security_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar api_version: The Salesforce API version used in ADF. Type: string (or Expression with resultType string). - :type api_version: any - :param extended_properties: Extended properties appended to the connection string. Type: string + :vartype api_version: any + :ivar extended_properties: Extended properties appended to the connection string. Type: string (or Expression with resultType string). - :type extended_properties: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype extended_properties: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -34010,6 +45429,41 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword environment_url: The URL of Salesforce Service Cloud instance. Default is + 'https://login.salesforce.com'. 
To copy data from sandbox, specify + 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, + 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). + :paramtype environment_url: any + :keyword username: The username for Basic authentication of the Salesforce instance. Type: + string (or Expression with resultType string). + :paramtype username: any + :keyword password: The password for Basic authentication of the Salesforce instance. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword security_token: The security token is optional to remotely access Salesforce instance. + :paramtype security_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword api_version: The Salesforce API version used in ADF. Type: string (or Expression with + resultType string). + :paramtype api_version: any + :keyword extended_properties: Extended properties appended to the connection string. Type: + string (or Expression with resultType string). + :paramtype extended_properties: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'SalesforceServiceCloud' # type: str self.environment_url = environment_url @@ -34026,31 +45480,31 @@ class SalesforceServiceCloudObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param object_api_name: The Salesforce Service Cloud object API name. Type: string (or + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar object_api_name: The Salesforce Service Cloud object API name. Type: string (or Expression with resultType string). - :type object_api_name: any + :vartype object_api_name: any """ _validation = { @@ -34085,6 +45539,31 @@ def __init__( object_api_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword object_api_name: The Salesforce Service Cloud object API name. Type: string (or + Expression with resultType string). 
+ :paramtype object_api_name: any + """ super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'SalesforceServiceCloudObject' # type: str self.object_api_name = object_api_name @@ -34095,42 +45574,42 @@ class SalesforceServiceCloudSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: The write behavior for the operation. Default is Insert. Possible values + :vartype disable_metrics_collection: any + :ivar write_behavior: The write behavior for the operation. Default is Insert. Possible values include: "Insert", "Upsert". - :type write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior - :param external_id_field_name: The name of the external ID field for upsert operation. Default + :vartype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :ivar external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). - :type external_id_field_name: any - :param ignore_null_values: The flag indicating whether or not to ignore null values from input + :vartype external_id_field_name: any + :ivar ignore_null_values: The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. 
If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: any + :vartype ignore_null_values: any """ _validation = { @@ -34166,6 +45645,43 @@ def __init__( ignore_null_values: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: The write behavior for the operation. Default is Insert. Possible + values include: "Insert", "Upsert". 
+ :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :keyword external_id_field_name: The name of the external ID field for upsert operation. + Default value is 'Id' column. Type: string (or Expression with resultType string). + :paramtype external_id_field_name: any + :keyword ignore_null_values: The flag indicating whether or not to ignore null values from + input dataset (except key fields) during write operation. Default value is false. If set it to + true, it means ADF will leave the data in the destination object unchanged when doing + upsert/update operation and insert defined default value when doing insert operation, versus + ADF will update the data in the destination object to NULL when doing upsert/update operation + and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType + boolean). + :paramtype ignore_null_values: any + """ super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SalesforceServiceCloudSink' # type: str self.write_behavior = write_behavior @@ -34178,31 +45694,31 @@ class SalesforceServiceCloudSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. 
Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any - :param read_behavior: The read behavior for the operation. Default is Query. Possible values + :vartype disable_metrics_collection: any + :ivar query: Database query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar read_behavior: The read behavior for the operation. Default is Query. Possible values include: "Query", "QueryAll". - :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior - :param additional_columns: Specifies the additional columns to be added to source data. 
Type: + :vartype read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -34234,6 +45750,31 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword read_behavior: The read behavior for the operation. Default is Query. Possible values + include: "Query", "QueryAll". + :paramtype read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :paramtype additional_columns: any + """ super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SalesforceServiceCloudSource' # type: str self.query = query @@ -34246,42 +45787,42 @@ class SalesforceSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: The write behavior for the operation. Default is Insert. Possible values + :vartype disable_metrics_collection: any + :ivar write_behavior: The write behavior for the operation. Default is Insert. Possible values include: "Insert", "Upsert". - :type write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior - :param external_id_field_name: The name of the external ID field for upsert operation. Default + :vartype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :ivar external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). - :type external_id_field_name: any - :param ignore_null_values: The flag indicating whether or not to ignore null values from input + :vartype external_id_field_name: any + :ivar ignore_null_values: The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. 
If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: any + :vartype ignore_null_values: any """ _validation = { @@ -34317,6 +45858,43 @@ def __init__( ignore_null_values: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: The write behavior for the operation. Default is Insert. Possible + values include: "Insert", "Upsert". 
+ :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :keyword external_id_field_name: The name of the external ID field for upsert operation. + Default value is 'Id' column. Type: string (or Expression with resultType string). + :paramtype external_id_field_name: any + :keyword ignore_null_values: The flag indicating whether or not to ignore null values from + input dataset (except key fields) during write operation. Default value is false. If set it to + true, it means ADF will leave the data in the destination object unchanged when doing + upsert/update operation and insert defined default value when doing insert operation, versus + ADF will update the data in the destination object to NULL when doing upsert/update operation + and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType + boolean). + :paramtype ignore_null_values: any + """ super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SalesforceSink' # type: str self.write_behavior = write_behavior @@ -34329,34 +45907,34 @@ class SalesforceSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. 
Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Type: string (or Expression with resultType string). 
- :type query: any - :param read_behavior: The read behavior for the operation. Default is Query. Possible values + :vartype additional_columns: any + :ivar query: Database query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar read_behavior: The read behavior for the operation. Default is Query. Possible values include: "Query", "QueryAll". - :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + :vartype read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior """ _validation = { @@ -34390,6 +45968,34 @@ def __init__( read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword read_behavior: The read behavior for the operation. Default is Query. Possible values + include: "Query", "QueryAll". + :paramtype read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + """ super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SalesforceSource' # type: str self.query = query @@ -34401,28 +46007,28 @@ class SapBwCubeDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder """ _validation = { @@ -34455,6 +46061,28 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. 
Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + """ super(SapBwCubeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'SapBwCube' # type: str @@ -34464,37 +46092,37 @@ class SapBWLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param server: Required. 
Host name of the SAP BW instance. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar server: Required. Host name of the SAP BW instance. Type: string (or Expression with resultType string). - :type server: any - :param system_number: Required. System number of the BW system. (Usually a two-digit decimal + :vartype server: any + :ivar system_number: Required. System number of the BW system. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). - :type system_number: any - :param client_id: Required. Client ID of the client on the BW system. (Usually a three-digit + :vartype system_number: any + :ivar client_id: Required. Client ID of the client on the BW system. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). - :type client_id: any - :param user_name: Username to access the SAP BW server. Type: string (or Expression with + :vartype client_id: any + :ivar user_name: Username to access the SAP BW server. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password to access the SAP BW server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are + :vartype user_name: any + :ivar password: Password to access the SAP BW server. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -34535,6 +46163,37 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword server: Required. Host name of the SAP BW instance. Type: string (or Expression with + resultType string). + :paramtype server: any + :keyword system_number: Required. System number of the BW system. (Usually a two-digit decimal + number represented as a string.) Type: string (or Expression with resultType string). + :paramtype system_number: any + :keyword client_id: Required. Client ID of the client on the BW system. (Usually a three-digit + decimal number represented as a string) Type: string (or Expression with resultType string). + :paramtype client_id: any + :keyword user_name: Username to access the SAP BW server. Type: string (or Expression with + resultType string). 
+ :paramtype user_name: any + :keyword password: Password to access the SAP BW server. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(SapBWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'SapBW' # type: str self.server = server @@ -34550,31 +46209,31 @@ class SapBwSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: MDX query. Type: string (or Expression with resultType string). - :type query: any + :vartype additional_columns: any + :ivar query: MDX query. Type: string (or Expression with resultType string). + :vartype query: any """ _validation = { @@ -34606,6 +46265,31 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: MDX query. Type: string (or Expression with resultType string). + :paramtype query: any + """ super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapBwSource' # type: str self.query = query @@ -34616,32 +46300,32 @@ class SapCloudForCustomerLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param url: Required. The URL of SAP Cloud for Customer OData API. For example, + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar url: Required. The URL of SAP Cloud for Customer OData API. For example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with resultType string). - :type url: any - :param username: The username for Basic authentication. Type: string (or Expression with + :vartype url: any + :ivar username: The username for Basic authentication. Type: string (or Expression with resultType string). - :type username: any - :param password: The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are + :vartype username: any + :ivar password: The password for Basic authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -34676,6 +46360,32 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword url: Required. The URL of SAP Cloud for Customer OData API. For example, + '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with + resultType string). + :paramtype url: any + :keyword username: The username for Basic authentication. Type: string (or Expression with + resultType string). + :paramtype username: any + :keyword password: The password for Basic authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. 
Either encryptedCredential or + username/password must be provided. Type: string (or Expression with resultType string). + :paramtype encrypted_credential: any + """ super(SapCloudForCustomerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'SapCloudForCustomer' # type: str self.url = url @@ -34689,31 +46399,31 @@ class SapCloudForCustomerResourceDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param path: Required. The path of the SAP Cloud for Customer OData entity. Type: string (or + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar path: Required. The path of the SAP Cloud for Customer OData entity. Type: string (or Expression with resultType string). - :type path: any + :vartype path: any """ _validation = { @@ -34749,6 +46459,31 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. 
Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword path: Required. The path of the SAP Cloud for Customer OData entity. Type: string (or + Expression with resultType string). + :paramtype path: any + """ super(SapCloudForCustomerResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'SapCloudForCustomerResource' # type: str self.path = path @@ -34759,38 +46494,38 @@ class SapCloudForCustomerSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible + :vartype disable_metrics_collection: any + :ivar write_behavior: The write behavior for the operation. Default is 'Insert'. Possible values include: "Insert", "Update". - :type write_behavior: str or + :vartype write_behavior: str or ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. 
It is the timeout + :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: any + :vartype http_request_timeout: any """ _validation = { @@ -34824,6 +46559,38 @@ def __init__( http_request_timeout: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword write_behavior: The write behavior for the operation. Default is 'Insert'. Possible + values include: "Insert", "Update". 
+ :paramtype write_behavior: str or + ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior + :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the + timeout to get a response, not the timeout to read response data. Default value: 00:05:00. + Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype http_request_timeout: any + """ super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SapCloudForCustomerSink' # type: str self.write_behavior = write_behavior @@ -34835,37 +46602,37 @@ class SapCloudForCustomerSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or + :vartype additional_columns: any + :ivar query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: any - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + :vartype query: any + :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. 
It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: any + :vartype http_request_timeout: any """ _validation = { @@ -34899,6 +46666,37 @@ def __init__( http_request_timeout: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or + Expression with resultType string). + :paramtype query: any + :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. 
It is the + timeout to get a response, not the timeout to read response data. Default value: 00:05:00. + Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype http_request_timeout: any + """ super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapCloudForCustomerSource' # type: str self.query = query @@ -34910,32 +46708,32 @@ class SapEccLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param url: Required. The URL of SAP ECC OData API. For example, + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. 
+ :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar url: Required. The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with resultType string). - :type url: str - :param username: The username for Basic authentication. Type: string (or Expression with + :vartype url: str + :ivar username: The username for Basic authentication. Type: string (or Expression with resultType string). - :type username: str - :param password: The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype username: str + :ivar password: The password for Basic authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). - :type encrypted_credential: str + :vartype encrypted_credential: str """ _validation = { @@ -34970,6 +46768,32 @@ def __init__( encrypted_credential: Optional[str] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword url: Required. The URL of SAP ECC OData API. For example, + '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with + resultType string). + :paramtype url: str + :keyword username: The username for Basic authentication. Type: string (or Expression with + resultType string). + :paramtype username: str + :keyword password: The password for Basic authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Either encryptedCredential or + username/password must be provided. Type: string (or Expression with resultType string). + :paramtype encrypted_credential: str + """ super(SapEccLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'SapEcc' # type: str self.url = url @@ -34983,31 +46807,31 @@ class SapEccResourceDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param path: Required. The path of the SAP ECC OData entity. 
Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). - :type path: any + :vartype path: any """ _validation = { @@ -35043,6 +46867,31 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with + resultType string). 
+ :paramtype path: any + """ super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'SapEccResource' # type: str self.path = path @@ -35053,37 +46902,37 @@ class SapEccSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with + :vartype additional_columns: any + :ivar query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: any - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + :vartype query: any + :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: any + :vartype http_request_timeout: any """ _validation = { @@ -35117,6 +46966,37 @@ def __init__( http_request_timeout: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. 
Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with + resultType string). + :paramtype query: any + :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the + timeout to get a response, not the timeout to read response data. Default value: 00:05:00. + Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype http_request_timeout: any + """ super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapEccSource' # type: str self.query = query @@ -35128,37 +47008,37 @@ class SapHanaLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: SAP HANA ODBC connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param server: Host name of the SAP HANA server. Type: string (or Expression with resultType + :vartype connection_string: any + :ivar server: Host name of the SAP HANA server. Type: string (or Expression with resultType string). - :type server: any - :param authentication_type: The authentication type to be used to connect to the SAP HANA + :vartype server: any + :ivar authentication_type: The authentication type to be used to connect to the SAP HANA server. Possible values include: "Basic", "Windows". - :type authentication_type: str or ~azure.mgmt.datafactory.models.SapHanaAuthenticationType - :param user_name: Username to access the SAP HANA server. Type: string (or Expression with + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.SapHanaAuthenticationType + :ivar user_name: Username to access the SAP HANA server. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password to access the SAP HANA server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype user_name: any + :ivar password: Password to access the SAP HANA server. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -35196,6 +47076,37 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: SAP HANA ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword server: Host name of the SAP HANA server. Type: string (or Expression with resultType + string). + :paramtype server: any + :keyword authentication_type: The authentication type to be used to connect to the SAP HANA + server. Possible values include: "Basic", "Windows". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.SapHanaAuthenticationType + :keyword user_name: Username to access the SAP HANA server. Type: string (or Expression with + resultType string). + :paramtype user_name: any + :keyword password: Password to access the SAP HANA server. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'SapHana' # type: str self.connection_string = connection_string @@ -35209,9 +47120,9 @@ def __init__( class SapHanaPartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for SAP HANA source partitioning. - :param partition_column_name: The name of the column that will be used for proceeding range + :ivar partition_column_name: The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: any + :vartype partition_column_name: any """ _attribute_map = { @@ -35224,6 +47135,11 @@ def __init__( partition_column_name: Optional[Any] = None, **kwargs ): + """ + :keyword partition_column_name: The name of the column that will be used for proceeding range + partitioning. Type: string (or Expression with resultType string). + :paramtype partition_column_name: any + """ super(SapHanaPartitionSettings, self).__init__(**kwargs) self.partition_column_name = partition_column_name @@ -35233,40 +47149,39 @@ class SapHanaSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. 
+ :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). - :type query: any - :param packet_size: The packet size of data read from SAP HANA. 
Type: integer(or Expression - with resultType integer). - :type packet_size: any - :param partition_option: The partition mechanism that will be used for SAP HANA read in + :vartype additional_columns: any + :ivar query: SAP HANA Sql query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar packet_size: The packet size of data read from SAP HANA. Type: integer(or Expression with + resultType integer). + :vartype packet_size: any + :ivar partition_option: The partition mechanism that will be used for SAP HANA read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for SAP HANA source - partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.SapHanaPartitionSettings + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for SAP HANA source partitioning. + :vartype partition_settings: ~azure.mgmt.datafactory.models.SapHanaPartitionSettings """ _validation = { @@ -35304,6 +47219,40 @@ def __init__( partition_settings: Optional["SapHanaPartitionSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: SAP HANA Sql query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword packet_size: The packet size of data read from SAP HANA. Type: integer(or Expression + with resultType integer). + :paramtype packet_size: any + :keyword partition_option: The partition mechanism that will be used for SAP HANA read in + parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for SAP HANA source + partitioning. + :paramtype partition_settings: ~azure.mgmt.datafactory.models.SapHanaPartitionSettings + """ super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapHanaSource' # type: str self.query = query @@ -35317,33 +47266,33 @@ class SapHanaTableDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param schema_type_properties_schema: The schema name of SAP HANA. Type: string (or Expression + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar schema_type_properties_schema: The schema name of SAP HANA. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The table name of SAP HANA. Type: string (or Expression with resultType string). - :type table: any + :vartype schema_type_properties_schema: any + :ivar table: The table name of SAP HANA. Type: string (or Expression with resultType string). + :vartype table: any """ _validation = { @@ -35380,6 +47329,34 @@ def __init__( table: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword schema_type_properties_schema: The schema name of SAP HANA. Type: string (or + Expression with resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The table name of SAP HANA. Type: string (or Expression with resultType + string). + :paramtype table: any + """ super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'SapHanaTable' # type: str self.schema_type_properties_schema = schema_type_properties_schema @@ -35391,55 +47368,54 @@ class SapOpenHubLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. 
- :type annotations: list[any] - :param server: Host name of the SAP BW instance where the open hub destination is located. - Type: string (or Expression with resultType string). - :type server: any - :param system_number: System number of the BW system where the open hub destination is located. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar server: Host name of the SAP BW instance where the open hub destination is located. Type: + string (or Expression with resultType string). + :vartype server: any + :ivar system_number: System number of the BW system where the open hub destination is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). - :type system_number: any - :param client_id: Client ID of the client on the BW system where the open hub destination is + :vartype system_number: any + :ivar client_id: Client ID of the client on the BW system where the open hub destination is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). - :type client_id: any - :param language: Language of the BW system where the open hub destination is located. The + :vartype client_id: any + :ivar language: Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression with resultType string). 
- :type language: any - :param system_id: SystemID of the SAP system where the table is located. Type: string (or + :vartype language: any + :ivar system_id: SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). - :type system_id: any - :param user_name: Username to access the SAP BW server where the open hub destination is + :vartype system_id: any + :ivar user_name: Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password to access the SAP BW server where the open hub destination is - located. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with + :vartype user_name: any + :ivar password: Password to access the SAP BW server where the open hub destination is located. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar message_server: The hostname of the SAP Message Server. Type: string (or Expression with resultType string). - :type message_server: any - :param message_server_service: The service name or port number of the Message Server. Type: + :vartype message_server: any + :ivar message_server_service: The service name or port number of the Message Server. Type: string (or Expression with resultType string). - :type message_server_service: any - :param logon_group: The Logon Group for the SAP System. Type: string (or Expression with + :vartype message_server_service: any + :ivar logon_group: The Logon Group for the SAP System. Type: string (or Expression with resultType string). - :type logon_group: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype logon_group: any + :ivar encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -35487,6 +47463,55 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword server: Host name of the SAP BW instance where the open hub destination is located. + Type: string (or Expression with resultType string). + :paramtype server: any + :keyword system_number: System number of the BW system where the open hub destination is + located. (Usually a two-digit decimal number represented as a string.) Type: string (or + Expression with resultType string). + :paramtype system_number: any + :keyword client_id: Client ID of the client on the BW system where the open hub destination is + located. (Usually a three-digit decimal number represented as a string) Type: string (or + Expression with resultType string). + :paramtype client_id: any + :keyword language: Language of the BW system where the open hub destination is located. The + default value is EN. Type: string (or Expression with resultType string). + :paramtype language: any + :keyword system_id: SystemID of the SAP system where the table is located. Type: string (or + Expression with resultType string). 
+ :paramtype system_id: any + :keyword user_name: Username to access the SAP BW server where the open hub destination is + located. Type: string (or Expression with resultType string). + :paramtype user_name: any + :keyword password: Password to access the SAP BW server where the open hub destination is + located. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword message_server: The hostname of the SAP Message Server. Type: string (or Expression + with resultType string). + :paramtype message_server: any + :keyword message_server_service: The service name or port number of the Message Server. Type: + string (or Expression with resultType string). + :paramtype message_server_service: any + :keyword logon_group: The Logon Group for the SAP System. Type: string (or Expression with + resultType string). + :paramtype logon_group: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'SapOpenHub' # type: str self.server = server @@ -35507,43 +47532,43 @@ class SapOpenHubSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. 
Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: any - :param exclude_last_request: Whether to exclude the records of the last request. The default + :vartype additional_columns: any + :ivar exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). - :type exclude_last_request: any - :param base_request_id: The ID of request for delta loading. Once it is set, only data with + :vartype exclude_last_request: any + :ivar base_request_id: The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). - :type base_request_id: any - :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that - will be used to read data from SAP Table. Type: string (or Expression with resultType string). - :type custom_rfc_read_table_function_module: any - :param sap_data_column_delimiter: The single character that will be used as delimiter passed to + :vartype base_request_id: any + :ivar custom_rfc_read_table_function_module: Specifies the custom RFC function module that will + be used to read data from SAP Table. Type: string (or Expression with resultType string). + :vartype custom_rfc_read_table_function_module: any + :ivar sap_data_column_delimiter: The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). - :type sap_data_column_delimiter: any + :vartype sap_data_column_delimiter: any """ _validation = { @@ -35581,6 +47606,43 @@ def __init__( sap_data_column_delimiter: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword exclude_last_request: Whether to exclude the records of the last request. The default + value is true. Type: boolean (or Expression with resultType boolean). + :paramtype exclude_last_request: any + :keyword base_request_id: The ID of request for delta loading. Once it is set, only data with + requestId larger than the value of this property will be retrieved. The default value is 0. + Type: integer (or Expression with resultType integer ). + :paramtype base_request_id: any + :keyword custom_rfc_read_table_function_module: Specifies the custom RFC function module that + will be used to read data from SAP Table. Type: string (or Expression with resultType string). 
+ :paramtype custom_rfc_read_table_function_module: any + :keyword sap_data_column_delimiter: The single character that will be used as delimiter passed + to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with + resultType string). + :paramtype sap_data_column_delimiter: any + """ super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapOpenHubSource' # type: str self.exclude_last_request = exclude_last_request @@ -35594,38 +47656,38 @@ class SapOpenHubTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param open_hub_destination_name: Required. The name of the Open Hub Destination with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar open_hub_destination_name: Required. The name of the Open Hub Destination with destination type as Database Table. Type: string (or Expression with resultType string). - :type open_hub_destination_name: any - :param exclude_last_request: Whether to exclude the records of the last request. The default + :vartype open_hub_destination_name: any + :ivar exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). - :type exclude_last_request: any - :param base_request_id: The ID of request for delta loading. 
Once it is set, only data with + :vartype exclude_last_request: any + :ivar base_request_id: The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). - :type base_request_id: any + :vartype base_request_id: any """ _validation = { @@ -35665,6 +47727,38 @@ def __init__( base_request_id: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword open_hub_destination_name: Required. The name of the Open Hub Destination with + destination type as Database Table. Type: string (or Expression with resultType string). 
+ :paramtype open_hub_destination_name: any + :keyword exclude_last_request: Whether to exclude the records of the last request. The default + value is true. Type: boolean (or Expression with resultType boolean). + :paramtype exclude_last_request: any + :keyword base_request_id: The ID of request for delta loading. Once it is set, only data with + requestId larger than the value of this property will be retrieved. The default value is 0. + Type: integer (or Expression with resultType integer ). + :paramtype base_request_id: any + """ super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'SapOpenHubTable' # type: str self.open_hub_destination_name = open_hub_destination_name @@ -35677,69 +47771,69 @@ class SapTableLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param server: Host name of the SAP instance where the table is located. 
Type: string (or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar server: Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). - :type server: any - :param system_number: System number of the SAP system where the table is located. (Usually a + :vartype server: any + :ivar system_number: System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). - :type system_number: any - :param client_id: Client ID of the client on the SAP system where the table is located. - (Usually a three-digit decimal number represented as a string) Type: string (or Expression with + :vartype system_number: any + :ivar client_id: Client ID of the client on the SAP system where the table is located. (Usually + a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). - :type client_id: any - :param language: Language of the SAP system where the table is located. The default value is - EN. Type: string (or Expression with resultType string). - :type language: any - :param system_id: SystemID of the SAP system where the table is located. Type: string (or + :vartype client_id: any + :ivar language: Language of the SAP system where the table is located. The default value is EN. 
+ Type: string (or Expression with resultType string). + :vartype language: any + :ivar system_id: SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). - :type system_id: any - :param user_name: Username to access the SAP server where the table is located. Type: string - (or Expression with resultType string). - :type user_name: any - :param password: Password to access the SAP server where the table is located. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with + :vartype system_id: any + :ivar user_name: Username to access the SAP server where the table is located. Type: string (or + Expression with resultType string). + :vartype user_name: any + :ivar password: Password to access the SAP server where the table is located. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar message_server: The hostname of the SAP Message Server. Type: string (or Expression with resultType string). - :type message_server: any - :param message_server_service: The service name or port number of the Message Server. Type: + :vartype message_server: any + :ivar message_server_service: The service name or port number of the Message Server. Type: string (or Expression with resultType string). - :type message_server_service: any - :param snc_mode: SNC activation indicator to access the SAP server where the table is located. + :vartype message_server_service: any + :ivar snc_mode: SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). - :type snc_mode: any - :param snc_my_name: Initiator's SNC name to access the SAP server where the table is located. + :vartype snc_mode: any + :ivar snc_my_name: Initiator's SNC name to access the SAP server where the table is located. 
Type: string (or Expression with resultType string). - :type snc_my_name: any - :param snc_partner_name: Communication partner's SNC name to access the SAP server where the + :vartype snc_my_name: any + :ivar snc_partner_name: Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :type snc_partner_name: any - :param snc_library_path: External security product's library to access the SAP server where the + :vartype snc_partner_name: any + :ivar snc_library_path: External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :type snc_library_path: any - :param snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string + :vartype snc_library_path: any + :ivar snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). - :type snc_qop: any - :param logon_group: The Logon Group for the SAP System. Type: string (or Expression with + :vartype snc_qop: any + :ivar logon_group: The Logon Group for the SAP System. Type: string (or Expression with resultType string). - :type logon_group: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype logon_group: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -35797,6 +47891,69 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword server: Host name of the SAP instance where the table is located. Type: string (or + Expression with resultType string). + :paramtype server: any + :keyword system_number: System number of the SAP system where the table is located. (Usually a + two-digit decimal number represented as a string.) Type: string (or Expression with resultType + string). + :paramtype system_number: any + :keyword client_id: Client ID of the client on the SAP system where the table is located. + (Usually a three-digit decimal number represented as a string) Type: string (or Expression with + resultType string). + :paramtype client_id: any + :keyword language: Language of the SAP system where the table is located. The default value is + EN. Type: string (or Expression with resultType string). + :paramtype language: any + :keyword system_id: SystemID of the SAP system where the table is located. Type: string (or + Expression with resultType string). + :paramtype system_id: any + :keyword user_name: Username to access the SAP server where the table is located. Type: string + (or Expression with resultType string). + :paramtype user_name: any + :keyword password: Password to access the SAP server where the table is located. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword message_server: The hostname of the SAP Message Server. Type: string (or Expression + with resultType string). 
+ :paramtype message_server: any + :keyword message_server_service: The service name or port number of the Message Server. Type: + string (or Expression with resultType string). + :paramtype message_server_service: any + :keyword snc_mode: SNC activation indicator to access the SAP server where the table is + located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). + :paramtype snc_mode: any + :keyword snc_my_name: Initiator's SNC name to access the SAP server where the table is located. + Type: string (or Expression with resultType string). + :paramtype snc_my_name: any + :keyword snc_partner_name: Communication partner's SNC name to access the SAP server where the + table is located. Type: string (or Expression with resultType string). + :paramtype snc_partner_name: any + :keyword snc_library_path: External security product's library to access the SAP server where + the table is located. Type: string (or Expression with resultType string). + :paramtype snc_library_path: any + :keyword snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string + (or Expression with resultType string). + :paramtype snc_qop: any + :keyword logon_group: The Logon Group for the SAP System. Type: string (or Expression with + resultType string). + :paramtype logon_group: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'SapTable' # type: str self.server = server @@ -35820,20 +47977,20 @@ def __init__( class SapTablePartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for SAP table source partitioning. - :param partition_column_name: The name of the column that will be used for proceeding range + :ivar partition_column_name: The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: any - :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + :vartype partition_column_name: any + :ivar partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_upper_bound: any - :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + :vartype partition_upper_bound: any + :ivar partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_lower_bound: any - :param max_partitions_number: The maximum value of partitions the table will be split into. + :vartype partition_lower_bound: any + :ivar max_partitions_number: The maximum value of partitions the table will be split into. Type: integer (or Expression with resultType string). 
- :type max_partitions_number: any + :vartype max_partitions_number: any """ _attribute_map = { @@ -35852,6 +48009,22 @@ def __init__( max_partitions_number: Optional[Any] = None, **kwargs ): + """ + :keyword partition_column_name: The name of the column that will be used for proceeding range + partitioning. Type: string (or Expression with resultType string). + :paramtype partition_column_name: any + :keyword partition_upper_bound: The maximum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). + :paramtype partition_upper_bound: any + :keyword partition_lower_bound: The minimum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). + :paramtype partition_lower_bound: any + :keyword max_partitions_number: The maximum value of partitions the table will be split into. + Type: integer (or Expression with resultType string). + :paramtype max_partitions_number: any + """ super(SapTablePartitionSettings, self).__init__(**kwargs) self.partition_column_name = partition_column_name self.partition_upper_bound = partition_upper_bound @@ -35864,31 +48037,31 @@ class SapTableResourceDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. 
+ :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: Required. The name of the SAP Table. Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: Required. The name of the SAP Table. Type: string (or Expression with resultType string). 
- :type table_name: any + :vartype table_name: any """ _validation = { @@ -35924,6 +48097,31 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: Required. The name of the SAP Table. Type: string (or Expression with + resultType string). + :paramtype table_name: any + """ super(SapTableResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'SapTableResource' # type: str self.table_name = table_name @@ -35934,58 +48132,58 @@ class SapTableSource(TabularSource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param row_count: The number of rows to be retrieved. Type: integer(or Expression with + :vartype additional_columns: any + :ivar row_count: The number of rows to be retrieved. Type: integer(or Expression with resultType integer). - :type row_count: any - :param row_skips: The number of rows that will be skipped. Type: integer (or Expression with + :vartype row_count: any + :ivar row_skips: The number of rows that will be skipped. Type: integer (or Expression with resultType integer). - :type row_skips: any - :param rfc_table_fields: The fields of the SAP table that will be retrieved. For example, + :vartype row_skips: any + :ivar rfc_table_fields: The fields of the SAP table that will be retrieved. For example, column0, column1. Type: string (or Expression with resultType string). - :type rfc_table_fields: any - :param rfc_table_options: The options for the filtering of the SAP Table. For example, COLUMN0 + :vartype rfc_table_fields: any + :ivar rfc_table_options: The options for the filtering of the SAP Table. For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with resultType string). - :type rfc_table_options: any - :param batch_size: Specifies the maximum number of rows that will be retrieved at a time when + :vartype rfc_table_options: any + :ivar batch_size: Specifies the maximum number of rows that will be retrieved at a time when retrieving data from SAP Table. Type: integer (or Expression with resultType integer). - :type batch_size: any - :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that - will be used to read data from SAP Table. 
Type: string (or Expression with resultType string). - :type custom_rfc_read_table_function_module: any - :param sap_data_column_delimiter: The single character that will be used as delimiter passed to + :vartype batch_size: any + :ivar custom_rfc_read_table_function_module: Specifies the custom RFC function module that will + be used to read data from SAP Table. Type: string (or Expression with resultType string). + :vartype custom_rfc_read_table_function_module: any + :ivar sap_data_column_delimiter: The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). - :type sap_data_column_delimiter: any - :param partition_option: The partition mechanism that will be used for SAP table read in + :vartype sap_data_column_delimiter: any + :ivar partition_option: The partition mechanism that will be used for SAP table read in parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for SAP table source + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for SAP table source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.SapTablePartitionSettings + :vartype partition_settings: ~azure.mgmt.datafactory.models.SapTablePartitionSettings """ _validation = { @@ -36033,6 +48231,58 @@ def __init__( partition_settings: Optional["SapTablePartitionSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword row_count: The number of rows to be retrieved. Type: integer(or Expression with + resultType integer). + :paramtype row_count: any + :keyword row_skips: The number of rows that will be skipped. Type: integer (or Expression with + resultType integer). + :paramtype row_skips: any + :keyword rfc_table_fields: The fields of the SAP table that will be retrieved. For example, + column0, column1. Type: string (or Expression with resultType string). + :paramtype rfc_table_fields: any + :keyword rfc_table_options: The options for the filtering of the SAP Table. For example, + COLUMN0 EQ SOME VALUE. Type: string (or Expression with resultType string). + :paramtype rfc_table_options: any + :keyword batch_size: Specifies the maximum number of rows that will be retrieved at a time when + retrieving data from SAP Table. Type: integer (or Expression with resultType integer). 
+ :paramtype batch_size: any + :keyword custom_rfc_read_table_function_module: Specifies the custom RFC function module that + will be used to read data from SAP Table. Type: string (or Expression with resultType string). + :paramtype custom_rfc_read_table_function_module: any + :keyword sap_data_column_delimiter: The single character that will be used as delimiter passed + to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with + resultType string). + :paramtype sap_data_column_delimiter: any + :keyword partition_option: The partition mechanism that will be used for SAP table read in + parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", + "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for SAP table source + partitioning. + :paramtype partition_settings: ~azure.mgmt.datafactory.models.SapTablePartitionSettings + """ super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapTableSource' # type: str self.row_count = row_count @@ -36053,22 +48303,22 @@ class ScheduleTrigger(MultiplePipelineTrigger): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. 
- :type description: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Trigger type.Constant filled by server. + :vartype type: str + :ivar description: Trigger description. + :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[any] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param recurrence: Required. Recurrence schedule configuration. - :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence + :ivar annotations: List of tags that can be used for describing the trigger. + :vartype annotations: list[any] + :ivar pipelines: Pipelines that need to be started. + :vartype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :ivar recurrence: Required. Recurrence schedule configuration. + :vartype recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence """ _validation = { @@ -36097,6 +48347,19 @@ def __init__( pipelines: Optional[List["TriggerPipelineReference"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Trigger description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the trigger. + :paramtype annotations: list[any] + :keyword pipelines: Pipelines that need to be started. + :paramtype pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :keyword recurrence: Required. Recurrence schedule configuration. 
+ :paramtype recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence + """ super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) self.type = 'ScheduleTrigger' # type: str self.recurrence = recurrence @@ -36105,22 +48368,22 @@ def __init__( class ScheduleTriggerRecurrence(msrest.serialization.Model): """The workflow trigger recurrence. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param frequency: The frequency. Possible values include: "NotSpecified", "Minute", "Hour", + :vartype additional_properties: dict[str, any] + :ivar frequency: The frequency. Possible values include: "NotSpecified", "Minute", "Hour", "Day", "Week", "Month", "Year". - :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency - :param interval: The interval. - :type interval: int - :param start_time: The start time. - :type start_time: ~datetime.datetime - :param end_time: The end time. - :type end_time: ~datetime.datetime - :param time_zone: The time zone. - :type time_zone: str - :param schedule: The recurrence schedule. - :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule + :vartype frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency + :ivar interval: The interval. + :vartype interval: int + :ivar start_time: The start time. + :vartype start_time: ~datetime.datetime + :ivar end_time: The end time. + :vartype end_time: ~datetime.datetime + :ivar time_zone: The time zone. + :vartype time_zone: str + :ivar schedule: The recurrence schedule. 
+ :vartype schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule """ _attribute_map = { @@ -36145,6 +48408,24 @@ def __init__( schedule: Optional["RecurrenceSchedule"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword frequency: The frequency. Possible values include: "NotSpecified", "Minute", "Hour", + "Day", "Week", "Month", "Year". + :paramtype frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency + :keyword interval: The interval. + :paramtype interval: int + :keyword start_time: The start time. + :paramtype start_time: ~datetime.datetime + :keyword end_time: The end time. + :paramtype end_time: ~datetime.datetime + :keyword time_zone: The time zone. + :paramtype time_zone: str + :keyword schedule: The recurrence schedule. + :paramtype schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule + """ super(ScheduleTriggerRecurrence, self).__init__(**kwargs) self.additional_properties = additional_properties self.frequency = frequency @@ -36160,14 +48441,14 @@ class ScriptAction(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param name: Required. The user provided name of the script action. - :type name: str - :param uri: Required. The URI for the script action. - :type uri: str - :param roles: Required. The node types on which the script action should be executed. - :type roles: str - :param parameters: The parameters for the script action. - :type parameters: str + :ivar name: Required. The user provided name of the script action. + :vartype name: str + :ivar uri: Required. The URI for the script action. + :vartype uri: str + :ivar roles: Required. The node types on which the script action should be executed. + :vartype roles: str + :ivar parameters: The parameters for the script action. 
+ :vartype parameters: str """ _validation = { @@ -36192,6 +48473,16 @@ def __init__( parameters: Optional[str] = None, **kwargs ): + """ + :keyword name: Required. The user provided name of the script action. + :paramtype name: str + :keyword uri: Required. The URI for the script action. + :paramtype uri: str + :keyword roles: Required. The node types on which the script action should be executed. + :paramtype roles: str + :keyword parameters: The parameters for the script action. + :paramtype parameters: str + """ super(ScriptAction, self).__init__(**kwargs) self.name = name self.uri = uri @@ -36204,10 +48495,10 @@ class SecureString(SecretBase): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of the secret.Constant filled by server. - :type type: str - :param value: Required. Value of secure string. - :type value: str + :ivar type: Required. Type of the secret.Constant filled by server. + :vartype type: str + :ivar value: Required. Value of secure string. + :vartype value: str """ _validation = { @@ -36226,6 +48517,10 @@ def __init__( value: str, **kwargs ): + """ + :keyword value: Required. Value of secure string. + :paramtype value: str + """ super(SecureString, self).__init__(**kwargs) self.type = 'SecureString' # type: str self.value = value @@ -36236,14 +48531,14 @@ class SelfDependencyTumblingWindowTriggerReference(DependencyReference): All required parameters must be populated in order to send to Azure. - :param type: Required. The type of dependency reference.Constant filled by server. - :type type: str - :param offset: Required. Timespan applied to the start time of a tumbling window when - evaluating dependency. - :type offset: str - :param size: The size of the window when evaluating the dependency. If undefined the frequency + :ivar type: Required. The type of dependency reference.Constant filled by server. + :vartype type: str + :ivar offset: Required. 
Timespan applied to the start time of a tumbling window when evaluating + dependency. + :vartype offset: str + :ivar size: The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used. - :type size: str + :vartype size: str """ _validation = { @@ -36265,6 +48560,14 @@ def __init__( size: Optional[str] = None, **kwargs ): + """ + :keyword offset: Required. Timespan applied to the start time of a tumbling window when + evaluating dependency. + :paramtype offset: str + :keyword size: The size of the window when evaluating the dependency. If undefined the + frequency of the tumbling window will be used. + :paramtype size: str + """ super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) self.type = 'SelfDependencyTumblingWindowTriggerReference' # type: str self.offset = offset @@ -36276,16 +48579,16 @@ class SelfHostedIntegrationRuntime(IntegrationRuntime): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of integration runtime.Constant filled by server. Possible values + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType - :param description: Integration runtime description. - :type description: str - :param linked_info: The base definition of a linked integration runtime. - :type linked_info: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType + :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType + :ivar description: Integration runtime description. 
+ :vartype description: str + :ivar linked_info: The base definition of a linked integration runtime. + :vartype linked_info: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType """ _validation = { @@ -36307,6 +48610,15 @@ def __init__( linked_info: Optional["LinkedIntegrationRuntimeType"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Integration runtime description. + :paramtype description: str + :keyword linked_info: The base definition of a linked integration runtime. + :paramtype linked_info: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType + """ super(SelfHostedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) self.type = 'SelfHosted' # type: str self.linked_info = linked_info @@ -36317,9 +48629,9 @@ class SelfHostedIntegrationRuntimeNode(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar node_name: Name of the integration runtime node. :vartype node_name: str :ivar machine_name: Machine name of the integration runtime node. @@ -36414,6 +48726,11 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + """ super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs) self.additional_properties = additional_properties self.node_name = None @@ -36443,12 +48760,12 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of integration runtime.Constant filled by server. Possible values + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". - :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType + :vartype type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType :ivar data_factory_name: The data factory name which the integration runtime belong to. :vartype data_factory_name: str :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped", @@ -36466,8 +48783,8 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode :ivar version: Version of the integration runtime. :vartype version: str - :param nodes: The list of nodes for this integration runtime. - :type nodes: list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] + :ivar nodes: The list of nodes for this integration runtime. + :vartype nodes: list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] :ivar scheduled_update_date: The date at which the integration runtime will be scheduled to update, in ISO8601 format. 
:vartype scheduled_update_date: ~datetime.datetime @@ -36485,9 +48802,9 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): :vartype auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate :ivar version_status: Status of the integration runtime version. :vartype version_status: str - :param links: The list of linked integration runtimes that are created to share with this + :ivar links: The list of linked integration runtimes that are created to share with this integration runtime. - :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] + :vartype links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] :ivar pushed_version: The version that the integration runtime is going to update to. :vartype pushed_version: str :ivar latest_version: The latest version on download center. @@ -36548,6 +48865,16 @@ def __init__( links: Optional[List["LinkedIntegrationRuntime"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword nodes: The list of nodes for this integration runtime. + :paramtype nodes: list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] + :keyword links: The list of linked integration runtimes that are created to share with this + integration runtime. + :paramtype links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] + """ super(SelfHostedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'SelfHosted' # type: str self.create_time = None @@ -36573,49 +48900,50 @@ class ServiceNowLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param endpoint: Required. The endpoint of the ServiceNow server. (i.e. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar endpoint: Required. The endpoint of the ServiceNow server. (i.e. :code:``.service-now.com). - :type endpoint: any - :param authentication_type: Required. The authentication type to use. Possible values include: + :vartype endpoint: any + :ivar authentication_type: Required. The authentication type to use. Possible values include: "Basic", "OAuth2". 
- :type authentication_type: str or ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType - :param username: The user name used to connect to the ServiceNow server for Basic and OAuth2 + :vartype authentication_type: str or + ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType + :ivar username: The user name used to connect to the ServiceNow server for Basic and OAuth2 authentication. - :type username: any - :param password: The password corresponding to the user name for Basic and OAuth2 + :vartype username: any + :ivar password: The password corresponding to the user name for Basic and OAuth2 authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param client_id: The client id for OAuth2 authentication. - :type client_id: any - :param client_secret: The client secret for OAuth2 authentication. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar client_id: The client id for OAuth2 authentication. + :vartype client_id: any + :ivar client_secret: The client secret for OAuth2 authentication. + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
- :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -36663,6 +48991,50 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword endpoint: Required. The endpoint of the ServiceNow server. (i.e. + :code:``.service-now.com). + :paramtype endpoint: any + :keyword authentication_type: Required. The authentication type to use. Possible values + include: "Basic", "OAuth2". 
+ :paramtype authentication_type: str or + ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType + :keyword username: The user name used to connect to the ServiceNow server for Basic and OAuth2 + authentication. + :paramtype username: any + :keyword password: The password corresponding to the user name for Basic and OAuth2 + authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword client_id: The client id for OAuth2 authentication. + :paramtype client_id: any + :keyword client_secret: The client secret for OAuth2 authentication. + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(ServiceNowLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'ServiceNow' # type: str self.endpoint = endpoint @@ -36682,30 +49054,30 @@ class ServiceNowObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -36740,6 +49112,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(ServiceNowObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'ServiceNowObject' # type: str self.table_name = table_name @@ -36750,32 +49146,32 @@ class ServiceNowSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -36807,6 +49203,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ServiceNowSource' # type: str self.query = query @@ -36817,21 +49239,21 @@ class ServicePrincipalCredential(Credential): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of credential.Constant filled by server. - :type type: str - :param description: Credential description. 
- :type description: str - :param annotations: List of tags that can be used for describing the Credential. - :type annotations: list[any] - :param service_principal_id: The app ID of the service principal used to authenticate. - :type service_principal_id: any - :param service_principal_key: The key of the service principal used to authenticate. - :type service_principal_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param tenant: The ID of the tenant to which the service principal belongs. - :type tenant: any + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of credential.Constant filled by server. + :vartype type: str + :ivar description: Credential description. + :vartype description: str + :ivar annotations: List of tags that can be used for describing the Credential. + :vartype annotations: list[any] + :ivar service_principal_id: The app ID of the service principal used to authenticate. + :vartype service_principal_id: any + :ivar service_principal_key: The key of the service principal used to authenticate. + :vartype service_principal_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar tenant: The ID of the tenant to which the service principal belongs. + :vartype tenant: any """ _validation = { @@ -36859,6 +49281,21 @@ def __init__( tenant: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Credential description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the Credential. + :paramtype annotations: list[any] + :keyword service_principal_id: The app ID of the service principal used to authenticate. + :paramtype service_principal_id: any + :keyword service_principal_key: The key of the service principal used to authenticate. 
+ :paramtype service_principal_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword tenant: The ID of the tenant to which the service principal belongs. + :paramtype tenant: any + """ super(ServicePrincipalCredential, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.type = 'ServicePrincipal' # type: str self.service_principal_id = service_principal_id @@ -36871,23 +49308,23 @@ class SetVariableActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param variable_name: Name of the variable whose value needs to be set. - :type variable_name: str - :param value: Value to be set. Could be a static value or Expression. - :type value: any + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. 
+ :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar variable_name: Name of the variable whose value needs to be set. + :vartype variable_name: str + :ivar value: Value to be set. Could be a static value or Expression. + :vartype value: any """ _validation = { @@ -36918,6 +49355,23 @@ def __init__( value: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword variable_name: Name of the variable whose value needs to be set. + :paramtype variable_name: str + :keyword value: Value to be set. Could be a static value or Expression. + :paramtype value: any + """ super(SetVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.type = 'SetVariable' # type: str self.variable_name = variable_name @@ -36929,17 +49383,17 @@ class SftpLocation(DatasetLocation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. 
Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage location.Constant filled by server. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: any - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :vartype folder_path: any + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: any + :vartype file_name: any """ _validation = { @@ -36961,6 +49415,17 @@ def __init__( file_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: any + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: any + """ super(SftpLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.type = 'SftpLocation' # type: str @@ -36970,47 +49435,47 @@ class SftpReadSettings(StoreReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. 
The read setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param recursive: If true, files under the folder path will be read recursively. Default is + :vartype disable_metrics_collection: any + :ivar recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: any - :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or Expression with + :vartype recursive: any + :ivar wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: any - :param wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType + :vartype wildcard_folder_path: any + :ivar wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: any - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param partition_root_path: Specify the root path where partition discovery starts from. Type: + :vartype wildcard_file_name: any + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. + :vartype enable_partition_discovery: bool + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
- :type partition_root_path: any - :param file_list_path: Point to a text file that lists each file (relative path to the path + :vartype partition_root_path: any + :ivar file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: any - :param delete_files_after_completion: Indicates whether the source files need to be deleted + :vartype file_list_path: any + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: any - :param modified_datetime_start: The start of file's modified datetime. Type: string (or + :vartype delete_files_after_completion: any + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: any - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + :vartype modified_datetime_start: any + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: any - :param disable_chunking: If true, disable parallel reading within each file. Default is false. + :vartype modified_datetime_end: any + :ivar disable_chunking: If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_chunking: any + :vartype disable_chunking: any """ _validation = { @@ -37052,6 +49517,47 @@ def __init__( disable_chunking: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: any + :keyword wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or Expression with + resultType string). + :paramtype wildcard_folder_path: any + :keyword wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType + string). + :paramtype wildcard_file_name: any + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. + :paramtype enable_partition_discovery: bool + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: any + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: any + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: any + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: any + :keyword modified_datetime_end: The end of file's modified datetime. 
Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: any + :keyword disable_chunking: If true, disable parallel reading within each file. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_chunking: any + """ super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SftpReadSettings' # type: str self.recursive = recursive @@ -37071,56 +49577,56 @@ class SftpServerLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The SFTP server host name. Type: string (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. 
+ :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The SFTP server host name. Type: string (or Expression with resultType string). - :type host: any - :param port: The TCP port number that the SFTP server uses to listen for client connections. + :vartype host: any + :ivar port: The TCP port number that the SFTP server uses to listen for client connections. Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: any - :param authentication_type: The authentication type to be used to connect to the FTP server. + :vartype port: any + :ivar authentication_type: The authentication type to be used to connect to the FTP server. Possible values include: "Basic", "SshPublicKey", "MultiFactor". - :type authentication_type: str or ~azure.mgmt.datafactory.models.SftpAuthenticationType - :param user_name: The username used to log on to the SFTP server. Type: string (or Expression + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.SftpAuthenticationType + :ivar user_name: The username used to log on to the SFTP server. Type: string (or Expression with resultType string). - :type user_name: any - :param password: Password to logon the SFTP server for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype user_name: any + :ivar password: Password to logon the SFTP server for Basic authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param private_key_path: The SSH private key file path for SshPublicKey authentication. Only + :vartype encrypted_credential: any + :ivar private_key_path: The SSH private key file path for SshPublicKey authentication. Only valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression with resultType string). - :type private_key_path: any - :param private_key_content: Base64 encoded SSH private key content for SshPublicKey + :vartype private_key_path: any + :ivar private_key_content: Base64 encoded SSH private key content for SshPublicKey authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. - :type private_key_content: ~azure.mgmt.datafactory.models.SecretBase - :param pass_phrase: The password to decrypt the SSH private key if the SSH private key is + :vartype private_key_content: ~azure.mgmt.datafactory.models.SecretBase + :ivar pass_phrase: The password to decrypt the SSH private key if the SSH private key is encrypted. - :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase - :param skip_host_key_validation: If true, skip the SSH host key validation. Default value is + :vartype pass_phrase: ~azure.mgmt.datafactory.models.SecretBase + :ivar skip_host_key_validation: If true, skip the SSH host key validation. Default value is false. Type: boolean (or Expression with resultType boolean). - :type skip_host_key_validation: any - :param host_key_fingerprint: The host key finger-print of the SFTP server. When + :vartype skip_host_key_validation: any + :ivar host_key_fingerprint: The host key finger-print of the SFTP server. When SkipHostKeyValidation is false, HostKeyFingerprint should be specified. 
Type: string (or Expression with resultType string). - :type host_key_fingerprint: any + :vartype host_key_fingerprint: any """ _validation = { @@ -37169,6 +49675,56 @@ def __init__( host_key_fingerprint: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The SFTP server host name. Type: string (or Expression with resultType + string). + :paramtype host: any + :keyword port: The TCP port number that the SFTP server uses to listen for client connections. + Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. + :paramtype port: any + :keyword authentication_type: The authentication type to be used to connect to the FTP server. + Possible values include: "Basic", "SshPublicKey", "MultiFactor". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.SftpAuthenticationType + :keyword user_name: The username used to log on to the SFTP server. Type: string (or Expression + with resultType string). + :paramtype user_name: any + :keyword password: Password to logon the SFTP server for Basic authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. 
Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword private_key_path: The SSH private key file path for SshPublicKey authentication. Only + valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either + PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH + format. Type: string (or Expression with resultType string). + :paramtype private_key_path: any + :keyword private_key_content: Base64 encoded SSH private key content for SshPublicKey + authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or + PrivateKeyContent should be specified. SSH private key should be OpenSSH format. + :paramtype private_key_content: ~azure.mgmt.datafactory.models.SecretBase + :keyword pass_phrase: The password to decrypt the SSH private key if the SSH private key is + encrypted. + :paramtype pass_phrase: ~azure.mgmt.datafactory.models.SecretBase + :keyword skip_host_key_validation: If true, skip the SSH host key validation. Default value is + false. Type: boolean (or Expression with resultType boolean). + :paramtype skip_host_key_validation: any + :keyword host_key_fingerprint: The host key finger-print of the SFTP server. When + SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or + Expression with resultType string). + :paramtype host_key_fingerprint: any + """ super(SftpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Sftp' # type: str self.host = host @@ -37189,26 +49745,26 @@ class SftpWriteSettings(StoreWriteSettings): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The write setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype additional_properties: dict[str, any] + :ivar type: Required. The write setting type.Constant filled by server. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: any - :param operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default + :vartype disable_metrics_collection: any + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: any + :ivar operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). - :type operation_timeout: any - :param use_temp_file_rename: Upload to temporary file(s) and rename. Disable this option if - your SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType + :vartype operation_timeout: any + :ivar use_temp_file_rename: Upload to temporary file(s) and rename. 
Disable this option if your + SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType boolean). - :type use_temp_file_rename: any + :vartype use_temp_file_rename: any """ _validation = { @@ -37236,6 +49792,26 @@ def __init__( use_temp_file_rename: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword copy_behavior: The type of copy behavior for copy sink. + :paramtype copy_behavior: any + :keyword operation_timeout: Specifies the timeout for writing each chunk to SFTP server. + Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). + :paramtype operation_timeout: any + :keyword use_temp_file_rename: Upload to temporary file(s) and rename. Disable this option if + your SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType + boolean). + :paramtype use_temp_file_rename: any + """ super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'SftpWriteSettings' # type: str self.operation_timeout = operation_timeout @@ -37247,38 +49823,38 @@ class SharePointOnlineListLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param site_url: Required. The URL of the SharePoint Online site. For example, + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar site_url: Required. The URL of the SharePoint Online site. For example, https://contoso.sharepoint.com/sites/siteName. Type: string (or Expression with resultType string). - :type site_url: any - :param tenant_id: Required. The tenant ID under which your application resides. You can find it + :vartype site_url: any + :ivar tenant_id: Required. The tenant ID under which your application resides. You can find it from Azure portal Active Directory overview page. 
Type: string (or Expression with resultType string). - :type tenant_id: any - :param service_principal_id: Required. The application (client) ID of your application + :vartype tenant_id: any + :ivar service_principal_id: Required. The application (client) ID of your application registered in Azure Active Directory. Make sure to grant SharePoint site permission to this application. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: Required. The client secret of your application registered in + :vartype service_principal_id: any + :ivar service_principal_key: Required. The client secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -37318,6 +49894,38 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword site_url: Required. The URL of the SharePoint Online site. For example, + https://contoso.sharepoint.com/sites/siteName. Type: string (or Expression with resultType + string). + :paramtype site_url: any + :keyword tenant_id: Required. The tenant ID under which your application resides. You can find + it from Azure portal Active Directory overview page. Type: string (or Expression with + resultType string). + :paramtype tenant_id: any + :keyword service_principal_id: Required. The application (client) ID of your application + registered in Azure Active Directory. Make sure to grant SharePoint site permission to this + application. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: Required. The client secret of your application registered in + Azure Active Directory. Type: string (or Expression with resultType string). + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(SharePointOnlineListLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'SharePointOnlineList' # type: str self.site_url = site_url @@ -37332,31 +49940,31 @@ class SharePointOnlineListResourceDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param list_name: The name of the SharePoint Online list. Type: string (or Expression with + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar list_name: The name of the SharePoint Online list. Type: string (or Expression with resultType string). - :type list_name: any + :vartype list_name: any """ _validation = { @@ -37391,6 +49999,31 @@ def __init__( list_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. 
+ :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword list_name: The name of the SharePoint Online list. Type: string (or Expression with + resultType string). + :paramtype list_name: any + """ super(SharePointOnlineListResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'SharePointOnlineListResource' # type: str self.list_name = list_name @@ -37401,30 +50034,30 @@ class SharePointOnlineListSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: The OData query to filter the data in SharePoint Online list. For example, + :vartype disable_metrics_collection: any + :ivar query: The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: any - :param http_request_timeout: The wait time to get a response from SharePoint Online. Default + :vartype query: any + :ivar http_request_timeout: The wait time to get a response from SharePoint Online. Default value is 5 minutes (00:05:00). Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: any + :vartype http_request_timeout: any """ _validation = { @@ -37454,6 +50087,30 @@ def __init__( http_request_timeout: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: The OData query to filter the data in SharePoint Online list. For example, + "$top=1". Type: string (or Expression with resultType string). + :paramtype query: any + :keyword http_request_timeout: The wait time to get a response from SharePoint Online. Default + value is 5 minutes (00:05:00). Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype http_request_timeout: any + """ super(SharePointOnlineListSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SharePointOnlineListSource' # type: str self.query = query @@ -37465,38 +50122,38 @@ class ShopifyLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. The endpoint of the Shopify server. (i.e. mystore.myshopify.com). - :type host: any - :param access_token: The API access token that can be used to access Shopify’s data. The token + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. The endpoint of the Shopify server. (i.e. mystore.myshopify.com). + :vartype host: any + :ivar access_token: The API access token that can be used to access Shopify’s data. The token won't expire if it is offline mode. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
- :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -37535,6 +50192,38 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. The endpoint of the Shopify server. (i.e. mystore.myshopify.com). + :paramtype host: any + :keyword access_token: The API access token that can be used to access Shopify’s data. The + token won't expire if it is offline mode. + :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. 
+ :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(ShopifyLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Shopify' # type: str self.host = host @@ -37550,30 +50239,30 @@ class ShopifyObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -37608,6 +50297,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'ShopifyObject' # type: str self.table_name = table_name @@ -37618,32 +50331,32 @@ class ShopifySource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. 
- :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -37675,6 +50388,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). 
+ :paramtype query: any + """ super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ShopifySource' # type: str self.query = query @@ -37683,12 +50422,12 @@ def __init__( class SkipErrorFile(msrest.serialization.Model): """Skip error file. - :param file_missing: Skip if file is deleted by other client during copy. Default is true. - Type: boolean (or Expression with resultType boolean). - :type file_missing: any - :param data_inconsistency: Skip if source/sink file changed by other concurrent write. Default + :ivar file_missing: Skip if file is deleted by other client during copy. Default is true. Type: + boolean (or Expression with resultType boolean). + :vartype file_missing: any + :ivar data_inconsistency: Skip if source/sink file changed by other concurrent write. Default is false. Type: boolean (or Expression with resultType boolean). - :type data_inconsistency: any + :vartype data_inconsistency: any """ _attribute_map = { @@ -37703,6 +50442,14 @@ def __init__( data_inconsistency: Optional[Any] = None, **kwargs ): + """ + :keyword file_missing: Skip if file is deleted by other client during copy. Default is true. + Type: boolean (or Expression with resultType boolean). + :paramtype file_missing: any + :keyword data_inconsistency: Skip if source/sink file changed by other concurrent write. + Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype data_inconsistency: any + """ super(SkipErrorFile, self).__init__(**kwargs) self.file_missing = file_missing self.data_inconsistency = data_inconsistency @@ -37713,34 +50460,34 @@ class SnowflakeDataset(Dataset): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. 
+ :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param schema_type_properties_schema: The schema name of the Snowflake database. Type: string + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar schema_type_properties_schema: The schema name of the Snowflake database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The table name of the Snowflake database. Type: string (or Expression with + :vartype schema_type_properties_schema: any + :ivar table: The table name of the Snowflake database. Type: string (or Expression with resultType string). - :type table: any + :vartype table: any """ _validation = { @@ -37777,6 +50524,34 @@ def __init__( table: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword schema_type_properties_schema: The schema name of the Snowflake database. Type: string + (or Expression with resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The table name of the Snowflake database. Type: string (or Expression with + resultType string). + :paramtype table: any + """ super(SnowflakeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'SnowflakeTable' # type: str self.schema_type_properties_schema = schema_type_properties_schema @@ -37788,21 +50563,21 @@ class SnowflakeExportCopyCommand(ExportSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The export setting type.Constant filled by server. - :type type: str - :param additional_copy_options: Additional copy options directly passed to snowflake Copy + :vartype additional_properties: dict[str, any] + :ivar type: Required. The export setting type.Constant filled by server. + :vartype type: str + :ivar additional_copy_options: Additional copy options directly passed to snowflake Copy Command. 
Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" }. - :type additional_copy_options: dict[str, any] - :param additional_format_options: Additional format options directly passed to snowflake Copy + :vartype additional_copy_options: dict[str, any] + :ivar additional_format_options: Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" }. - :type additional_format_options: dict[str, any] + :vartype additional_format_options: dict[str, any] """ _validation = { @@ -37824,6 +50599,21 @@ def __init__( additional_format_options: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword additional_copy_options: Additional copy options directly passed to snowflake Copy + Command. Type: key value pairs (value should be string type) (or Expression with resultType + object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": + "'HH24:MI:SS.FF'" }. + :paramtype additional_copy_options: dict[str, any] + :keyword additional_format_options: Additional format options directly passed to snowflake Copy + Command. Type: key value pairs (value should be string type) (or Expression with resultType + object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" + }. 
+ :paramtype additional_format_options: dict[str, any] + """ super(SnowflakeExportCopyCommand, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'SnowflakeExportCopyCommand' # type: str self.additional_copy_options = additional_copy_options @@ -37835,21 +50625,21 @@ class SnowflakeImportCopyCommand(ImportSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The import setting type.Constant filled by server. - :type type: str - :param additional_copy_options: Additional copy options directly passed to snowflake Copy + :vartype additional_properties: dict[str, any] + :ivar type: Required. The import setting type.Constant filled by server. + :vartype type: str + :ivar additional_copy_options: Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" }. - :type additional_copy_options: dict[str, any] - :param additional_format_options: Additional format options directly passed to snowflake Copy + :vartype additional_copy_options: dict[str, any] + :ivar additional_format_options: Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": "'FALSE'" }. 
- :type additional_format_options: dict[str, any] + :vartype additional_format_options: dict[str, any] """ _validation = { @@ -37871,6 +50661,21 @@ def __init__( additional_format_options: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword additional_copy_options: Additional copy options directly passed to snowflake Copy + Command. Type: key value pairs (value should be string type) (or Expression with resultType + object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": + "'HH24:MI:SS.FF'" }. + :paramtype additional_copy_options: dict[str, any] + :keyword additional_format_options: Additional format options directly passed to snowflake Copy + Command. Type: key value pairs (value should be string type) (or Expression with resultType + object). Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": + "'FALSE'" }. + :paramtype additional_format_options: dict[str, any] + """ super(SnowflakeImportCopyCommand, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'SnowflakeImportCopyCommand' # type: str self.additional_copy_options = additional_copy_options @@ -37882,28 +50687,28 @@ class SnowflakeLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string of snowflake. Type: string, + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string of snowflake. Type: string, SecureString. - :type connection_string: any - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -37936,6 +50741,28 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string of snowflake. Type: string, + SecureString. + :paramtype connection_string: any + :keyword password: The Azure key vault secret reference of password in connection string. + :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(SnowflakeLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Snowflake' # type: str self.connection_string = connection_string @@ -37948,34 +50775,34 @@ class SnowflakeSink(CopySink): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + :vartype disable_metrics_collection: any + :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: any - :param import_settings: Snowflake import settings. - :type import_settings: ~azure.mgmt.datafactory.models.SnowflakeImportCopyCommand + :vartype pre_copy_script: any + :ivar import_settings: Snowflake import settings. + :vartype import_settings: ~azure.mgmt.datafactory.models.SnowflakeImportCopyCommand """ _validation = { @@ -38009,6 +50836,34 @@ def __init__( import_settings: Optional["SnowflakeImportCopyCommand"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :paramtype pre_copy_script: any + :keyword import_settings: Snowflake import settings. + :paramtype import_settings: ~azure.mgmt.datafactory.models.SnowflakeImportCopyCommand + """ super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SnowflakeSink' # type: str self.pre_copy_script = pre_copy_script @@ -38020,27 +50875,27 @@ class SnowflakeSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). 
- :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query: Snowflake Sql query. Type: string (or Expression with resultType string). - :type query: any - :param export_settings: Snowflake export settings. - :type export_settings: ~azure.mgmt.datafactory.models.SnowflakeExportCopyCommand + :vartype disable_metrics_collection: any + :ivar query: Snowflake Sql query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar export_settings: Snowflake export settings. + :vartype export_settings: ~azure.mgmt.datafactory.models.SnowflakeExportCopyCommand """ _validation = { @@ -38070,6 +50925,27 @@ def __init__( export_settings: Optional["SnowflakeExportCopyCommand"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query: Snowflake Sql query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword export_settings: Snowflake export settings. + :paramtype export_settings: ~azure.mgmt.datafactory.models.SnowflakeExportCopyCommand + """ super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SnowflakeSource' # type: str self.query = query @@ -38081,62 +50957,61 @@ class SparkLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param host: Required. IP address or host name of the Spark server. - :type host: any - :param port: Required. The TCP port that the Spark server uses to listen for client - connections. - :type port: any - :param server_type: The type of Spark server. Possible values include: "SharkServer", + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar host: Required. IP address or host name of the Spark server. + :vartype host: any + :ivar port: Required. The TCP port that the Spark server uses to listen for client connections. + :vartype port: any + :ivar server_type: The type of Spark server. Possible values include: "SharkServer", "SharkServer2", "SparkThriftServer". - :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType - :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible + :vartype server_type: str or ~azure.mgmt.datafactory.models.SparkServerType + :ivar thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible values include: "Binary", "SASL", "HTTP ". 
- :type thrift_transport_protocol: str or + :vartype thrift_transport_protocol: str or ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol - :param authentication_type: Required. The authentication method used to access the Spark - server. Possible values include: "Anonymous", "Username", "UsernameAndPassword", + :ivar authentication_type: Required. The authentication method used to access the Spark server. + Possible values include: "Anonymous", "Username", "UsernameAndPassword", "WindowsAzureHDInsightService". - :type authentication_type: str or ~azure.mgmt.datafactory.models.SparkAuthenticationType - :param username: The user name that you use to access Spark Server. - :type username: any - :param password: The password corresponding to the user name that you provided in the Username + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.SparkAuthenticationType + :ivar username: The user name that you use to access Spark Server. + :vartype username: any + :ivar password: The password corresponding to the user name that you provided in the Username field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param http_path: The partial URL corresponding to the Spark server. - :type http_path: any - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar http_path: The partial URL corresponding to the Spark server. + :vartype http_path: any + :ivar enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :type enable_ssl: any - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + :vartype enable_ssl: any + :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. 
The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: any - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + :vartype trusted_cert_path: any + :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: any - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + :vartype use_system_trust_store: any + :ivar allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: any - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + :vartype allow_host_name_cn_mismatch: any + :ivar allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype allow_self_signed_server_cert: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -38193,6 +51068,62 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. 
+ :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword host: Required. IP address or host name of the Spark server. + :paramtype host: any + :keyword port: Required. The TCP port that the Spark server uses to listen for client + connections. + :paramtype port: any + :keyword server_type: The type of Spark server. Possible values include: "SharkServer", + "SharkServer2", "SparkThriftServer". + :paramtype server_type: str or ~azure.mgmt.datafactory.models.SparkServerType + :keyword thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible + values include: "Binary", "SASL", "HTTP ". + :paramtype thrift_transport_protocol: str or + ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol + :keyword authentication_type: Required. The authentication method used to access the Spark + server. Possible values include: "Anonymous", "Username", "UsernameAndPassword", + "WindowsAzureHDInsightService". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.SparkAuthenticationType + :keyword username: The user name that you use to access Spark Server. + :paramtype username: any + :keyword password: The password corresponding to the user name that you provided in the + Username field. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword http_path: The partial URL corresponding to the Spark server. + :paramtype http_path: any + :keyword enable_ssl: Specifies whether the connections to the server are encrypted using SSL. + The default value is false. 
+ :paramtype enable_ssl: any + :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates + for verifying the server when connecting over SSL. This property can only be set when using SSL + on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :paramtype trusted_cert_path: any + :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system + trust store or from a specified PEM file. The default value is false. + :paramtype use_system_trust_store: any + :keyword allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :paramtype allow_host_name_cn_mismatch: any + :keyword allow_self_signed_server_cert: Specifies whether to allow self-signed certificates + from the server. The default value is false. + :paramtype allow_self_signed_server_cert: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(SparkLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Spark' # type: str self.host = host @@ -38216,36 +51147,36 @@ class SparkObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. 
Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The table name of the Spark. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The schema name of the Spark. Type: string (or Expression + :vartype table_name: any + :ivar table: The table name of the Spark. Type: string (or Expression with resultType string). + :vartype table: any + :ivar schema_type_properties_schema: The schema name of the Spark. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -38284,6 +51215,37 @@ def __init__( schema_type_properties_schema: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. 
+ :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of the Spark. Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of the Spark. Type: string (or + Expression with resultType string). + :paramtype schema_type_properties_schema: any + """ super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'SparkObject' # type: str self.table_name = table_name @@ -38296,32 +51258,32 @@ class SparkSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -38353,6 +51315,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SparkSource' # type: str self.query = query @@ -38363,19 +51351,19 @@ class SqlAlwaysEncryptedProperties(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param always_encrypted_akv_auth_type: Required. 
Sql always encrypted AKV authentication type. + :ivar always_encrypted_akv_auth_type: Required. Sql always encrypted AKV authentication type. Type: string (or Expression with resultType string). Possible values include: "ServicePrincipal", "ManagedIdentity", "UserAssignedManagedIdentity". - :type always_encrypted_akv_auth_type: str or + :vartype always_encrypted_akv_auth_type: str or ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedAkvAuthType - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Azure Key Vault authentication. Type: string (or Expression with resultType string). - :type service_principal_id: any - :param service_principal_key: The key of the service principal used to authenticate against + :ivar service_principal_id: The client ID of the application in Azure Active Directory used for + Azure Key Vault authentication. Type: string (or Expression with resultType string). + :vartype service_principal_id: any + :ivar service_principal_key: The key of the service principal used to authenticate against Azure Key Vault. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -38398,6 +51386,21 @@ def __init__( credential: Optional["CredentialReference"] = None, **kwargs ): + """ + :keyword always_encrypted_akv_auth_type: Required. Sql always encrypted AKV authentication + type. Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipal", "ManagedIdentity", "UserAssignedManagedIdentity". 
+ :paramtype always_encrypted_akv_auth_type: str or + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedAkvAuthType + :keyword service_principal_id: The client ID of the application in Azure Active Directory used + for Azure Key Vault authentication. Type: string (or Expression with resultType string). + :paramtype service_principal_id: any + :keyword service_principal_key: The key of the service principal used to authenticate against + Azure Key Vault. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(SqlAlwaysEncryptedProperties, self).__init__(**kwargs) self.always_encrypted_akv_auth_type = always_encrypted_akv_auth_type self.service_principal_id = service_principal_id @@ -38410,54 +51413,54 @@ class SqlDWSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + :vartype disable_metrics_collection: any + :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: any - :param allow_poly_base: Indicates to use PolyBase to copy data into SQL Data Warehouse when + :vartype pre_copy_script: any + :ivar allow_poly_base: Indicates to use PolyBase to copy data into SQL Data Warehouse when applicable. Type: boolean (or Expression with resultType boolean). - :type allow_poly_base: any - :param poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. 
- :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings - :param allow_copy_command: Indicates to use Copy Command to copy data into SQL Data Warehouse. + :vartype allow_poly_base: any + :ivar poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. + :vartype poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings + :ivar allow_copy_command: Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType boolean). - :type allow_copy_command: any - :param copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is + :vartype allow_copy_command: any + :ivar copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is true. - :type copy_command_settings: ~azure.mgmt.datafactory.models.DWCopyCommandSettings - :param table_option: The option to handle sink table, such as autoCreate. For now only + :vartype copy_command_settings: ~azure.mgmt.datafactory.models.DWCopyCommandSettings + :ivar table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: any - :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + :vartype table_option: any + :ivar sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :type sql_writer_use_table_lock: any - :param write_behavior: Write behavior when copying data into azure SQL DW. Type: + :vartype sql_writer_use_table_lock: any + :ivar write_behavior: Write behavior when copying data into azure SQL DW. Type: SqlDWWriteBehaviorEnum (or Expression with resultType SqlDWWriteBehaviorEnum). - :type write_behavior: any - :param upsert_settings: SQL DW upsert settings. 
- :type upsert_settings: ~azure.mgmt.datafactory.models.SqlDWUpsertSettings + :vartype write_behavior: any + :ivar upsert_settings: SQL DW upsert settings. + :vartype upsert_settings: ~azure.mgmt.datafactory.models.SqlDWUpsertSettings """ _validation = { @@ -38505,6 +51508,54 @@ def __init__( upsert_settings: Optional["SqlDWUpsertSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :paramtype pre_copy_script: any + :keyword allow_poly_base: Indicates to use PolyBase to copy data into SQL Data Warehouse when + applicable. Type: boolean (or Expression with resultType boolean). 
+ :paramtype allow_poly_base: any + :keyword poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. + :paramtype poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings + :keyword allow_copy_command: Indicates to use Copy Command to copy data into SQL Data + Warehouse. Type: boolean (or Expression with resultType boolean). + :paramtype allow_copy_command: any + :keyword copy_command_settings: Specifies Copy Command related settings when allowCopyCommand + is true. + :paramtype copy_command_settings: ~azure.mgmt.datafactory.models.DWCopyCommandSettings + :keyword table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :paramtype table_option: any + :keyword sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean + (or Expression with resultType boolean). + :paramtype sql_writer_use_table_lock: any + :keyword write_behavior: Write behavior when copying data into azure SQL DW. Type: + SqlDWWriteBehaviorEnum (or Expression with resultType SqlDWWriteBehaviorEnum). + :paramtype write_behavior: any + :keyword upsert_settings: SQL DW upsert settings. + :paramtype upsert_settings: ~azure.mgmt.datafactory.models.SqlDWUpsertSettings + """ super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SqlDWSink' # type: str self.pre_copy_script = pre_copy_script @@ -38523,45 +51574,45 @@ class SqlDWSource(TabularSource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with + :vartype additional_columns: any + :ivar sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: any - :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Data Warehouse + :vartype sql_reader_query: any + :ivar sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Data Warehouse source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: any - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + :vartype sql_reader_stored_procedure_name: any + :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. - :type stored_procedure_parameters: any - :param partition_option: The partition mechanism that will be used for Sql read in parallel. + :vartype stored_procedure_parameters: any + :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. + :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -38601,6 +51652,45 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with + resultType string). 
+ :paramtype sql_reader_query: any + :keyword sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Data + Warehouse source. This cannot be used at the same time as SqlReaderQuery. Type: string (or + Expression with resultType string). + :paramtype sql_reader_stored_procedure_name: any + :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType + object), itemType: StoredProcedureParameter. + :paramtype stored_procedure_parameters: any + :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. + :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + """ super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlDWSource' # type: str self.sql_reader_query = sql_reader_query @@ -38613,12 +51703,12 @@ def __init__( class SqlDWUpsertSettings(msrest.serialization.Model): """Sql DW upsert option settings. - :param interim_schema_name: Schema name for interim table. Type: string (or Expression with + :ivar interim_schema_name: Schema name for interim table. Type: string (or Expression with resultType string). - :type interim_schema_name: any - :param keys: Key column names for unique row identification. Type: array of strings (or + :vartype interim_schema_name: any + :ivar keys: Key column names for unique row identification. 
Type: array of strings (or Expression with resultType array of strings). - :type keys: any + :vartype keys: any """ _attribute_map = { @@ -38633,6 +51723,14 @@ def __init__( keys: Optional[Any] = None, **kwargs ): + """ + :keyword interim_schema_name: Schema name for interim table. Type: string (or Expression with + resultType string). + :paramtype interim_schema_name: any + :keyword keys: Key column names for unique row identification. Type: array of strings (or + Expression with resultType array of strings). + :paramtype keys: any + """ super(SqlDWUpsertSettings, self).__init__(**kwargs) self.interim_schema_name = interim_schema_name self.keys = keys @@ -38643,55 +51741,55 @@ class SqlMISink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + :vartype disable_metrics_collection: any + :ivar sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: any - :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with - resultType string). - :type sql_writer_table_type: any - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + :vartype sql_writer_stored_procedure_name: any + :ivar sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :type pre_copy_script: any - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, + :vartype sql_writer_table_type: any + :ivar pre_copy_script: SQL pre-copy script. 
Type: string (or Expression with resultType + string). + :vartype pre_copy_script: any + :ivar stored_procedure_parameters: SQL stored procedure parameters. + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + :ivar stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :type stored_procedure_table_type_parameter_name: any - :param table_option: The option to handle sink table, such as autoCreate. For now only + :vartype stored_procedure_table_type_parameter_name: any + :ivar table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: any - :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + :vartype table_option: any + :ivar sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :type sql_writer_use_table_lock: any - :param write_behavior: White behavior when copying data into azure SQL MI. Type: + :vartype sql_writer_use_table_lock: any + :ivar write_behavior: White behavior when copying data into azure SQL MI. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). - :type write_behavior: any - :param upsert_settings: SQL upsert settings. - :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings + :vartype write_behavior: any + :ivar upsert_settings: SQL upsert settings. 
+ :vartype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ _validation = { @@ -38739,6 +51837,55 @@ def __init__( upsert_settings: Optional["SqlUpsertSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :paramtype sql_writer_stored_procedure_name: any + :keyword sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :paramtype sql_writer_table_type: any + :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). 
+ :paramtype pre_copy_script: any + :keyword stored_procedure_parameters: SQL stored procedure parameters. + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :keyword stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :paramtype stored_procedure_table_type_parameter_name: any + :keyword table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :paramtype table_option: any + :keyword sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean + (or Expression with resultType boolean). + :paramtype sql_writer_use_table_lock: any + :keyword write_behavior: White behavior when copying data into azure SQL MI. Type: + SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :paramtype write_behavior: any + :keyword upsert_settings: SQL upsert settings. + :paramtype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings + """ super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SqlMISink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name @@ -38757,46 +51904,46 @@ class SqlMISource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. 
Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: any - :param sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed + :vartype additional_columns: any + :ivar sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :vartype sql_reader_query: any + :ivar sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: any - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + :vartype sql_reader_stored_procedure_name: any + :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: any - :param partition_option: The partition mechanism that will be used for Sql read in parallel. + :ivar produce_additional_types: Which additional types to produce. + :vartype produce_additional_types: any + :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. + :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -38838,6 +51985,47 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword sql_reader_query: SQL reader query. Type: string (or Expression with resultType + string). 
+ :paramtype sql_reader_query: any + :keyword sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed + Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or + Expression with resultType string). + :paramtype sql_reader_stored_procedure_name: any + :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :keyword produce_additional_types: Which additional types to produce. + :paramtype produce_additional_types: any + :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. + :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + """ super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlMISource' # type: str self.sql_reader_query = sql_reader_query @@ -38851,21 +52039,21 @@ def __init__( class SqlPartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for Sql source partitioning. - :param partition_column_name: The name of the column in integer or datetime type that will be + :ivar partition_column_name: The name of the column in integer or datetime type that will be used for proceeding partitioning. If not specified, the primary key of the table is auto-detected and used as the partition column. 
Type: string (or Expression with resultType string). - :type partition_column_name: any - :param partition_upper_bound: The maximum value of the partition column for partition range + :vartype partition_column_name: any + :ivar partition_upper_bound: The maximum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). - :type partition_upper_bound: any - :param partition_lower_bound: The minimum value of the partition column for partition range + :vartype partition_upper_bound: any + :ivar partition_lower_bound: The minimum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). - :type partition_lower_bound: any + :vartype partition_lower_bound: any """ _attribute_map = { @@ -38882,6 +52070,23 @@ def __init__( partition_lower_bound: Optional[Any] = None, **kwargs ): + """ + :keyword partition_column_name: The name of the column in integer or datetime type that will be + used for proceeding partitioning. If not specified, the primary key of the table is + auto-detected and used as the partition column. Type: string (or Expression with resultType + string). + :paramtype partition_column_name: any + :keyword partition_upper_bound: The maximum value of the partition column for partition range + splitting. This value is used to decide the partition stride, not for filtering the rows in + table. All rows in the table or query result will be partitioned and copied. Type: string (or + Expression with resultType string). 
+ :paramtype partition_upper_bound: any + :keyword partition_lower_bound: The minimum value of the partition column for partition range + splitting. This value is used to decide the partition stride, not for filtering the rows in + table. All rows in the table or query result will be partitioned and copied. Type: string (or + Expression with resultType string). + :paramtype partition_lower_bound: any + """ super(SqlPartitionSettings, self).__init__(**kwargs) self.partition_column_name = partition_column_name self.partition_upper_bound = partition_upper_bound @@ -38893,33 +52098,33 @@ class SqlServerLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Required. The connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. 
+ :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param user_name: The on-premises Windows authentication user name. Type: string (or Expression + :vartype connection_string: any + :ivar user_name: The on-premises Windows authentication user name. Type: string (or Expression with resultType string). - :type user_name: any - :param password: The on-premises Windows authentication password. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype user_name: any + :ivar password: The on-premises Windows authentication password. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any - :param always_encrypted_settings: Sql always encrypted properties. - :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :vartype encrypted_credential: any + :ivar always_encrypted_settings: Sql always encrypted properties. + :vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -38956,6 +52161,34 @@ def __init__( always_encrypted_settings: Optional["SqlAlwaysEncryptedProperties"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword user_name: The on-premises Windows authentication user name. Type: string (or + Expression with resultType string). + :paramtype user_name: any + :keyword password: The on-premises Windows authentication password. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + :keyword always_encrypted_settings: Sql always encrypted properties. + :paramtype always_encrypted_settings: + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + """ super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'SqlServer' # type: str self.connection_string = connection_string @@ -38970,55 +52203,55 @@ class SqlServerSink(CopySink): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + :vartype disable_metrics_collection: any + :ivar sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: any - :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with - resultType string). - :type sql_writer_table_type: any - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + :vartype sql_writer_stored_procedure_name: any + :ivar sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :type pre_copy_script: any - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, + :vartype sql_writer_table_type: any + :ivar pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :vartype pre_copy_script: any + :ivar stored_procedure_parameters: SQL stored procedure parameters. + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + :ivar stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :type stored_procedure_table_type_parameter_name: any - :param table_option: The option to handle sink table, such as autoCreate. 
For now only + :vartype stored_procedure_table_type_parameter_name: any + :ivar table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: any - :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + :vartype table_option: any + :ivar sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :type sql_writer_use_table_lock: any - :param write_behavior: Write behavior when copying data into sql server. Type: + :vartype sql_writer_use_table_lock: any + :ivar write_behavior: Write behavior when copying data into sql server. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). - :type write_behavior: any - :param upsert_settings: SQL upsert settings. - :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings + :vartype write_behavior: any + :ivar upsert_settings: SQL upsert settings. + :vartype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ _validation = { @@ -39066,6 +52299,55 @@ def __init__( upsert_settings: Optional["SqlUpsertSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :paramtype sql_writer_stored_procedure_name: any + :keyword sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :paramtype sql_writer_table_type: any + :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :paramtype pre_copy_script: any + :keyword stored_procedure_parameters: SQL stored procedure parameters. + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :keyword stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :paramtype stored_procedure_table_type_parameter_name: any + :keyword table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :paramtype table_option: any + :keyword sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean + (or Expression with resultType boolean). + :paramtype sql_writer_use_table_lock: any + :keyword write_behavior: Write behavior when copying data into sql server. 
Type: + SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :paramtype write_behavior: any + :keyword upsert_settings: SQL upsert settings. + :paramtype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings + """ super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SqlServerSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name @@ -39084,46 +52366,46 @@ class SqlServerSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: any - :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database - source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression - with resultType string). - :type sql_reader_stored_procedure_name: any - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + :vartype additional_columns: any + :ivar sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
+ :vartype sql_reader_query: any + :ivar sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. + This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with + resultType string). + :vartype sql_reader_stored_procedure_name: any + :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: any - :param partition_option: The partition mechanism that will be used for Sql read in parallel. + :ivar produce_additional_types: Which additional types to produce. + :vartype produce_additional_types: any + :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. + :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -39165,6 +52447,47 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword sql_reader_query: SQL reader query. Type: string (or Expression with resultType + string). + :paramtype sql_reader_query: any + :keyword sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :paramtype sql_reader_stored_procedure_name: any + :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :keyword produce_additional_types: Which additional types to produce. + :paramtype produce_additional_types: any + :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". 
+ :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. + :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + """ super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlServerSource' # type: str self.sql_reader_query = sql_reader_query @@ -39180,29 +52503,29 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param stored_procedure_name: Required. Stored procedure name. Type: string (or Expression with + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. 
+ :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar stored_procedure_name: Required. Stored procedure name. Type: string (or Expression with resultType string). - :type stored_procedure_name: any - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + :vartype stored_procedure_name: any + :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] """ @@ -39239,6 +52562,30 @@ def __init__( stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. 
+ :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword stored_procedure_name: Required. Stored procedure name. Type: string (or Expression + with resultType string). + :paramtype stored_procedure_name: any + :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + """ super(SqlServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'SqlServerStoredProcedure' # type: str self.stored_procedure_name = stored_procedure_name @@ -39250,37 +52597,37 @@ class SqlServerTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. 
+ :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param schema_type_properties_schema: The schema name of the SQL Server dataset. 
Type: string + :vartype table_name: any + :ivar schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: any - :param table: The table name of the SQL Server dataset. Type: string (or Expression with + :vartype schema_type_properties_schema: any + :ivar table: The table name of the SQL Server dataset. Type: string (or Expression with resultType string). - :type table: any + :vartype table: any """ _validation = { @@ -39319,6 +52666,37 @@ def __init__( table: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. 
+ :paramtype table_name: any + :keyword schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string + (or Expression with resultType string). + :paramtype schema_type_properties_schema: any + :keyword table: The table name of the SQL Server dataset. Type: string (or Expression with + resultType string). + :paramtype table: any + """ super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'SqlServerTable' # type: str self.table_name = table_name @@ -39331,55 +52709,55 @@ class SqlSink(CopySink): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy sink type.Constant filled by server. - :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy sink type.Constant filled by server. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: any - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + :vartype write_batch_size: any + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: any - :param sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType + :vartype write_batch_timeout: any + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: any - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + :vartype sink_retry_count: any + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the sink data + :vartype sink_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + :vartype disable_metrics_collection: any + :ivar sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: any - :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with - resultType string). - :type sql_writer_table_type: any - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + :vartype sql_writer_stored_procedure_name: any + :ivar sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType + string). + :vartype sql_writer_table_type: any + :ivar pre_copy_script: SQL pre-copy script. 
Type: string (or Expression with resultType string). - :type pre_copy_script: any - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, + :vartype pre_copy_script: any + :ivar stored_procedure_parameters: SQL stored procedure parameters. + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + :ivar stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :type stored_procedure_table_type_parameter_name: any - :param table_option: The option to handle sink table, such as autoCreate. For now only + :vartype stored_procedure_table_type_parameter_name: any + :ivar table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: any - :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + :vartype table_option: any + :ivar sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - :type sql_writer_use_table_lock: any - :param write_behavior: Write behavior when copying data into sql. Type: SqlWriteBehaviorEnum - (or Expression with resultType SqlWriteBehaviorEnum). - :type write_behavior: any - :param upsert_settings: SQL upsert settings. - :type upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings + :vartype sql_writer_use_table_lock: any + :ivar write_behavior: Write behavior when copying data into sql. Type: SqlWriteBehaviorEnum (or + Expression with resultType SqlWriteBehaviorEnum). + :vartype write_behavior: any + :ivar upsert_settings: SQL upsert settings. 
+ :vartype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings """ _validation = { @@ -39427,6 +52805,55 @@ def __init__( upsert_settings: Optional["SqlUpsertSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: any + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: any + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: any + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :paramtype sql_writer_stored_procedure_name: any + :keyword sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :paramtype sql_writer_table_type: any + :keyword pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). 
+ :paramtype pre_copy_script: any + :keyword stored_procedure_parameters: SQL stored procedure parameters. + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :keyword stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :paramtype stored_procedure_table_type_parameter_name: any + :keyword table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :paramtype table_option: any + :keyword sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean + (or Expression with resultType boolean). + :paramtype sql_writer_use_table_lock: any + :keyword write_behavior: Write behavior when copying data into sql. Type: SqlWriteBehaviorEnum + (or Expression with resultType SqlWriteBehaviorEnum). + :paramtype write_behavior: any + :keyword upsert_settings: SQL upsert settings. + :paramtype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings + """ super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SqlSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name @@ -39445,48 +52872,48 @@ class SqlSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. 
Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: any - :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database - source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression - with resultType string). - :type sql_reader_stored_procedure_name: any - :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + :vartype additional_columns: any + :ivar sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :vartype sql_reader_query: any + :ivar sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. + This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with + resultType string). + :vartype sql_reader_stored_procedure_name: any + :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, + :vartype stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param isolation_level: Specifies the transaction locking behavior for the SQL source. Allowed + :ivar isolation_level: Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). - :type isolation_level: any - :param partition_option: The partition mechanism that will be used for Sql read in parallel. 
+ :vartype isolation_level: any + :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. + :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ _validation = { @@ -39528,6 +52955,49 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword sql_reader_query: SQL reader query. Type: string (or Expression with resultType + string). + :paramtype sql_reader_query: any + :keyword sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :paramtype sql_reader_stored_procedure_name: any + :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :paramtype stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :keyword isolation_level: Specifies the transaction locking behavior for the SQL source. + Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default + value is ReadCommitted. Type: string (or Expression with resultType string). + :paramtype isolation_level: any + :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. 
+ :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings + """ super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlSource' # type: str self.sql_reader_query = sql_reader_query @@ -39541,15 +53011,15 @@ def __init__( class SqlUpsertSettings(msrest.serialization.Model): """Sql upsert option settings. - :param use_temp_db: Specifies whether to use temp db for upsert interim table. Type: boolean - (or Expression with resultType boolean). - :type use_temp_db: any - :param interim_schema_name: Schema name for interim table. Type: string (or Expression with + :ivar use_temp_db: Specifies whether to use temp db for upsert interim table. Type: boolean (or + Expression with resultType boolean). + :vartype use_temp_db: any + :ivar interim_schema_name: Schema name for interim table. Type: string (or Expression with resultType string). - :type interim_schema_name: any - :param keys: Key column names for unique row identification. Type: array of strings (or + :vartype interim_schema_name: any + :ivar keys: Key column names for unique row identification. Type: array of strings (or Expression with resultType array of strings). - :type keys: any + :vartype keys: any """ _attribute_map = { @@ -39566,6 +53036,17 @@ def __init__( keys: Optional[Any] = None, **kwargs ): + """ + :keyword use_temp_db: Specifies whether to use temp db for upsert interim table. Type: boolean + (or Expression with resultType boolean). + :paramtype use_temp_db: any + :keyword interim_schema_name: Schema name for interim table. Type: string (or Expression with + resultType string). + :paramtype interim_schema_name: any + :keyword keys: Key column names for unique row identification. 
Type: array of strings (or + Expression with resultType array of strings). + :paramtype keys: any + """ super(SqlUpsertSettings, self).__init__(**kwargs) self.use_temp_db = use_temp_db self.interim_schema_name = interim_schema_name @@ -39577,45 +53058,45 @@ class SquareLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_properties: Properties used to connect to Square. It is mutually exclusive + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_properties: Properties used to connect to Square. 
It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: any - :param host: The URL of the Square instance. (i.e. mystore.mysquare.com). - :type host: any - :param client_id: The client ID associated with your Square application. - :type client_id: any - :param client_secret: The client secret associated with your Square application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param redirect_uri: The redirect URL assigned in the Square application dashboard. (i.e. + :vartype connection_properties: any + :ivar host: The URL of the Square instance. (i.e. mystore.mysquare.com). + :vartype host: any + :ivar client_id: The client ID associated with your Square application. + :vartype client_id: any + :ivar client_secret: The client secret associated with your Square application. + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar redirect_uri: The redirect URL assigned in the Square application dashboard. (i.e. http://localhost:2500). - :type redirect_uri: any - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype redirect_uri: any + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. 
The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -39659,6 +53140,45 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_properties: Properties used to connect to Square. It is mutually exclusive + with any other properties in the linked service. Type: object. + :paramtype connection_properties: any + :keyword host: The URL of the Square instance. (i.e. mystore.mysquare.com). + :paramtype host: any + :keyword client_id: The client ID associated with your Square application. + :paramtype client_id: any + :keyword client_secret: The client secret associated with your Square application. + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword redirect_uri: The redirect URL assigned in the Square application dashboard. (i.e. + http://localhost:2500). 
+ :paramtype redirect_uri: any + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(SquareLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Square' # type: str self.connection_properties = connection_properties @@ -39677,30 +53197,30 @@ class SquareObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. 
+ :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). 
+ :vartype table_name: any """ _validation = { @@ -39735,6 +53255,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(SquareObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'SquareObject' # type: str self.table_name = table_name @@ -39745,32 +53289,32 @@ class SquareSource(TabularSource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -39802,6 +53346,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SquareSource' # type: str self.query = query @@ -39812,12 +53382,12 @@ class SSISAccessCredential(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param domain: Required. Domain for windows authentication. - :type domain: any - :param user_name: Required. UseName for windows authentication. - :type user_name: any - :param password: Required. Password for windows authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase + :ivar domain: Required. Domain for windows authentication. + :vartype domain: any + :ivar user_name: Required. UseName for windows authentication. + :vartype user_name: any + :ivar password: Required. Password for windows authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -39840,6 +53410,14 @@ def __init__( password: "SecretBase", **kwargs ): + """ + :keyword domain: Required. Domain for windows authentication. + :paramtype domain: any + :keyword user_name: Required. UseName for windows authentication. + :paramtype user_name: any + :keyword password: Required. Password for windows authentication. 
+ :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + """ super(SSISAccessCredential, self).__init__(**kwargs) self.domain = domain self.user_name = user_name @@ -39851,16 +53429,16 @@ class SSISChildPackage(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param package_path: Required. Path for embedded child package. Type: string (or Expression - with resultType string). - :type package_path: any - :param package_name: Name for embedded child package. - :type package_name: str - :param package_content: Required. Content for embedded child package. Type: string (or + :ivar package_path: Required. Path for embedded child package. Type: string (or Expression with + resultType string). + :vartype package_path: any + :ivar package_name: Name for embedded child package. + :vartype package_name: str + :ivar package_content: Required. Content for embedded child package. Type: string (or Expression with resultType string). - :type package_content: any - :param package_last_modified_date: Last modified date for embedded child package. - :type package_last_modified_date: str + :vartype package_content: any + :ivar package_last_modified_date: Last modified date for embedded child package. + :vartype package_last_modified_date: str """ _validation = { @@ -39884,6 +53462,18 @@ def __init__( package_last_modified_date: Optional[str] = None, **kwargs ): + """ + :keyword package_path: Required. Path for embedded child package. Type: string (or Expression + with resultType string). + :paramtype package_path: any + :keyword package_name: Name for embedded child package. + :paramtype package_name: str + :keyword package_content: Required. Content for embedded child package. Type: string (or + Expression with resultType string). + :paramtype package_content: any + :keyword package_last_modified_date: Last modified date for embedded child package. 
+ :paramtype package_last_modified_date: str + """ super(SSISChildPackage, self).__init__(**kwargs) self.package_path = package_path self.package_name = package_name @@ -39899,15 +53489,15 @@ class SsisObjectMetadata(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of metadata.Constant filled by server. Possible values include: + :ivar type: Required. Type of metadata.Constant filled by server. Possible values include: "Folder", "Project", "Package", "Environment". - :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str + :vartype type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :ivar id: Metadata id. + :vartype id: long + :ivar name: Metadata name. + :vartype name: str + :ivar description: Metadata description. + :vartype description: str """ _validation = { @@ -39933,6 +53523,14 @@ def __init__( description: Optional[str] = None, **kwargs ): + """ + :keyword id: Metadata id. + :paramtype id: long + :keyword name: Metadata name. + :paramtype name: str + :keyword description: Metadata description. + :paramtype description: str + """ super(SsisObjectMetadata, self).__init__(**kwargs) self.type = None # type: Optional[str] self.id = id @@ -39945,19 +53543,19 @@ class SsisEnvironment(SsisObjectMetadata): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of metadata.Constant filled by server. Possible values include: + :ivar type: Required. Type of metadata.Constant filled by server. Possible values include: "Folder", "Project", "Package", "Environment". - :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType - :param id: Metadata id. - :type id: long - :param name: Metadata name. 
- :type name: str - :param description: Metadata description. - :type description: str - :param folder_id: Folder id which contains environment. - :type folder_id: long - :param variables: Variable in environment. - :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] + :vartype type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :ivar id: Metadata id. + :vartype id: long + :ivar name: Metadata name. + :vartype name: str + :ivar description: Metadata description. + :vartype description: str + :ivar folder_id: Folder id which contains environment. + :vartype folder_id: long + :ivar variables: Variable in environment. + :vartype variables: list[~azure.mgmt.datafactory.models.SsisVariable] """ _validation = { @@ -39983,6 +53581,18 @@ def __init__( variables: Optional[List["SsisVariable"]] = None, **kwargs ): + """ + :keyword id: Metadata id. + :paramtype id: long + :keyword name: Metadata name. + :paramtype name: str + :keyword description: Metadata description. + :paramtype description: str + :keyword folder_id: Folder id which contains environment. + :paramtype folder_id: long + :keyword variables: Variable in environment. + :paramtype variables: list[~azure.mgmt.datafactory.models.SsisVariable] + """ super(SsisEnvironment, self).__init__(id=id, name=name, description=description, **kwargs) self.type = 'Environment' # type: str self.folder_id = folder_id @@ -39992,14 +53602,14 @@ def __init__( class SsisEnvironmentReference(msrest.serialization.Model): """Ssis environment reference. - :param id: Environment reference id. - :type id: long - :param environment_folder_name: Environment folder name. - :type environment_folder_name: str - :param environment_name: Environment name. - :type environment_name: str - :param reference_type: Reference type. - :type reference_type: str + :ivar id: Environment reference id. + :vartype id: long + :ivar environment_folder_name: Environment folder name. 
+ :vartype environment_folder_name: str + :ivar environment_name: Environment name. + :vartype environment_name: str + :ivar reference_type: Reference type. + :vartype reference_type: str """ _attribute_map = { @@ -40018,6 +53628,16 @@ def __init__( reference_type: Optional[str] = None, **kwargs ): + """ + :keyword id: Environment reference id. + :paramtype id: long + :keyword environment_folder_name: Environment folder name. + :paramtype environment_folder_name: str + :keyword environment_name: Environment name. + :paramtype environment_name: str + :keyword reference_type: Reference type. + :paramtype reference_type: str + """ super(SsisEnvironmentReference, self).__init__(**kwargs) self.id = id self.environment_folder_name = environment_folder_name @@ -40030,12 +53650,12 @@ class SSISExecutionCredential(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param domain: Required. Domain for windows authentication. - :type domain: any - :param user_name: Required. UseName for windows authentication. - :type user_name: any - :param password: Required. Password for windows authentication. - :type password: ~azure.mgmt.datafactory.models.SecureString + :ivar domain: Required. Domain for windows authentication. + :vartype domain: any + :ivar user_name: Required. UseName for windows authentication. + :vartype user_name: any + :ivar password: Required. Password for windows authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecureString """ _validation = { @@ -40058,6 +53678,14 @@ def __init__( password: "SecureString", **kwargs ): + """ + :keyword domain: Required. Domain for windows authentication. + :paramtype domain: any + :keyword user_name: Required. UseName for windows authentication. + :paramtype user_name: any + :keyword password: Required. Password for windows authentication. 
+ :paramtype password: ~azure.mgmt.datafactory.models.SecureString + """ super(SSISExecutionCredential, self).__init__(**kwargs) self.domain = domain self.user_name = user_name @@ -40069,9 +53697,9 @@ class SSISExecutionParameter(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. SSIS package execution parameter value. Type: string (or Expression - with resultType string). - :type value: any + :ivar value: Required. SSIS package execution parameter value. Type: string (or Expression with + resultType string). + :vartype value: any """ _validation = { @@ -40088,6 +53716,11 @@ def __init__( value: Any, **kwargs ): + """ + :keyword value: Required. SSIS package execution parameter value. Type: string (or Expression + with resultType string). + :paramtype value: any + """ super(SSISExecutionParameter, self).__init__(**kwargs) self.value = value @@ -40097,15 +53730,15 @@ class SsisFolder(SsisObjectMetadata): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of metadata.Constant filled by server. Possible values include: + :ivar type: Required. Type of metadata.Constant filled by server. Possible values include: "Folder", "Project", "Package", "Environment". - :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str + :vartype type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :ivar id: Metadata id. + :vartype id: long + :ivar name: Metadata name. + :vartype name: str + :ivar description: Metadata description. + :vartype description: str """ _validation = { @@ -40127,6 +53760,14 @@ def __init__( description: Optional[str] = None, **kwargs ): + """ + :keyword id: Metadata id. + :paramtype id: long + :keyword name: Metadata name. 
+ :paramtype name: str + :keyword description: Metadata description. + :paramtype description: str + """ super(SsisFolder, self).__init__(id=id, name=name, description=description, **kwargs) self.type = 'Folder' # type: str @@ -40136,17 +53777,17 @@ class SSISLogLocation(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param log_path: Required. The SSIS package execution log path. Type: string (or Expression - with resultType string). - :type log_path: any - :param type: Required. The type of SSIS log location. Possible values include: "File". - :type type: str or ~azure.mgmt.datafactory.models.SsisLogLocationType - :param access_credential: The package execution log access credential. - :type access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential - :param log_refresh_interval: Specifies the interval to refresh log. The default interval is 5 + :ivar log_path: Required. The SSIS package execution log path. Type: string (or Expression with + resultType string). + :vartype log_path: any + :ivar type: Required. The type of SSIS log location. Possible values include: "File". + :vartype type: str or ~azure.mgmt.datafactory.models.SsisLogLocationType + :ivar access_credential: The package execution log access credential. + :vartype access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential + :ivar log_refresh_interval: Specifies the interval to refresh log. The default interval is 5 minutes. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type log_refresh_interval: any + :vartype log_refresh_interval: any """ _validation = { @@ -40170,6 +53811,19 @@ def __init__( log_refresh_interval: Optional[Any] = None, **kwargs ): + """ + :keyword log_path: Required. The SSIS package execution log path. Type: string (or Expression + with resultType string). + :paramtype log_path: any + :keyword type: Required. 
The type of SSIS log location. Possible values include: "File". + :paramtype type: str or ~azure.mgmt.datafactory.models.SsisLogLocationType + :keyword access_credential: The package execution log access credential. + :paramtype access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential + :keyword log_refresh_interval: Specifies the interval to refresh log. The default interval is 5 + minutes. Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype log_refresh_interval: any + """ super(SSISLogLocation, self).__init__(**kwargs) self.log_path = log_path self.type = type @@ -40180,10 +53834,10 @@ def __init__( class SsisObjectMetadataListResponse(msrest.serialization.Model): """A list of SSIS object metadata. - :param value: List of SSIS object metadata. - :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: List of SSIS object metadata. + :vartype value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _attribute_map = { @@ -40198,6 +53852,12 @@ def __init__( next_link: Optional[str] = None, **kwargs ): + """ + :keyword value: List of SSIS object metadata. + :paramtype value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(SsisObjectMetadataListResponse, self).__init__(**kwargs) self.value = value self.next_link = next_link @@ -40206,14 +53866,14 @@ def __init__( class SsisObjectMetadataStatusResponse(msrest.serialization.Model): """The status of the operation. - :param status: The status of the operation. - :type status: str - :param name: The operation name. 
- :type name: str - :param properties: The operation properties. - :type properties: str - :param error: The operation error message. - :type error: str + :ivar status: The status of the operation. + :vartype status: str + :ivar name: The operation name. + :vartype name: str + :ivar properties: The operation properties. + :vartype properties: str + :ivar error: The operation error message. + :vartype error: str """ _attribute_map = { @@ -40232,6 +53892,16 @@ def __init__( error: Optional[str] = None, **kwargs ): + """ + :keyword status: The status of the operation. + :paramtype status: str + :keyword name: The operation name. + :paramtype name: str + :keyword properties: The operation properties. + :paramtype properties: str + :keyword error: The operation error message. + :paramtype error: str + """ super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) self.status = status self.name = name @@ -40244,23 +53914,23 @@ class SsisPackage(SsisObjectMetadata): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of metadata.Constant filled by server. Possible values include: + :ivar type: Required. Type of metadata.Constant filled by server. Possible values include: "Folder", "Project", "Package", "Environment". - :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param folder_id: Folder id which contains package. - :type folder_id: long - :param project_version: Project version which contains package. - :type project_version: long - :param project_id: Project id which contains package. - :type project_id: long - :param parameters: Parameters in package. - :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + :vartype type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :ivar id: Metadata id. 
+ :vartype id: long + :ivar name: Metadata name. + :vartype name: str + :ivar description: Metadata description. + :vartype description: str + :ivar folder_id: Folder id which contains package. + :vartype folder_id: long + :ivar project_version: Project version which contains package. + :vartype project_version: long + :ivar project_id: Project id which contains package. + :vartype project_id: long + :ivar parameters: Parameters in package. + :vartype parameters: list[~azure.mgmt.datafactory.models.SsisParameter] """ _validation = { @@ -40290,6 +53960,22 @@ def __init__( parameters: Optional[List["SsisParameter"]] = None, **kwargs ): + """ + :keyword id: Metadata id. + :paramtype id: long + :keyword name: Metadata name. + :paramtype name: str + :keyword description: Metadata description. + :paramtype description: str + :keyword folder_id: Folder id which contains package. + :paramtype folder_id: long + :keyword project_version: Project version which contains package. + :paramtype project_version: long + :keyword project_id: Project id which contains package. + :paramtype project_id: long + :keyword parameters: Parameters in package. + :paramtype parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ super(SsisPackage, self).__init__(id=id, name=name, description=description, **kwargs) self.type = 'Package' # type: str self.folder_id = folder_id @@ -40301,30 +53987,29 @@ def __init__( class SSISPackageLocation(msrest.serialization.Model): """SSIS package location. - :param package_path: The SSIS package path. Type: string (or Expression with resultType - string). - :type package_path: any - :param type: The type of SSIS package location. Possible values include: "SSISDB", "File", + :ivar package_path: The SSIS package path. Type: string (or Expression with resultType string). + :vartype package_path: any + :ivar type: The type of SSIS package location. Possible values include: "SSISDB", "File", "InlinePackage", "PackageStore". 
- :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType - :param package_password: Password of the package. - :type package_password: ~azure.mgmt.datafactory.models.SecretBase - :param access_credential: The package access credential. - :type access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential - :param configuration_path: The configuration file of the package execution. Type: string (or + :vartype type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType + :ivar package_password: Password of the package. + :vartype package_password: ~azure.mgmt.datafactory.models.SecretBase + :ivar access_credential: The package access credential. + :vartype access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential + :ivar configuration_path: The configuration file of the package execution. Type: string (or Expression with resultType string). - :type configuration_path: any - :param configuration_access_credential: The configuration file access credential. - :type configuration_access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential - :param package_name: The package name. - :type package_name: str - :param package_content: The embedded package content. Type: string (or Expression with + :vartype configuration_path: any + :ivar configuration_access_credential: The configuration file access credential. + :vartype configuration_access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential + :ivar package_name: The package name. + :vartype package_name: str + :ivar package_content: The embedded package content. Type: string (or Expression with resultType string). - :type package_content: any - :param package_last_modified_date: The embedded package last modified date. - :type package_last_modified_date: str - :param child_packages: The embedded child package list. 
- :type child_packages: list[~azure.mgmt.datafactory.models.SSISChildPackage] + :vartype package_content: any + :ivar package_last_modified_date: The embedded package last modified date. + :vartype package_last_modified_date: str + :ivar child_packages: The embedded child package list. + :vartype child_packages: list[~azure.mgmt.datafactory.models.SSISChildPackage] """ _attribute_map = { @@ -40355,6 +54040,32 @@ def __init__( child_packages: Optional[List["SSISChildPackage"]] = None, **kwargs ): + """ + :keyword package_path: The SSIS package path. Type: string (or Expression with resultType + string). + :paramtype package_path: any + :keyword type: The type of SSIS package location. Possible values include: "SSISDB", "File", + "InlinePackage", "PackageStore". + :paramtype type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType + :keyword package_password: Password of the package. + :paramtype package_password: ~azure.mgmt.datafactory.models.SecretBase + :keyword access_credential: The package access credential. + :paramtype access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential + :keyword configuration_path: The configuration file of the package execution. Type: string (or + Expression with resultType string). + :paramtype configuration_path: any + :keyword configuration_access_credential: The configuration file access credential. + :paramtype configuration_access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential + :keyword package_name: The package name. + :paramtype package_name: str + :keyword package_content: The embedded package content. Type: string (or Expression with + resultType string). + :paramtype package_content: any + :keyword package_last_modified_date: The embedded package last modified date. + :paramtype package_last_modified_date: str + :keyword child_packages: The embedded child package list. 
+ :paramtype child_packages: list[~azure.mgmt.datafactory.models.SSISChildPackage] + """ super(SSISPackageLocation, self).__init__(**kwargs) self.package_path = package_path self.type = type @@ -40371,30 +54082,30 @@ def __init__( class SsisParameter(msrest.serialization.Model): """Ssis parameter. - :param id: Parameter id. - :type id: long - :param name: Parameter name. - :type name: str - :param description: Parameter description. - :type description: str - :param data_type: Parameter type. - :type data_type: str - :param required: Whether parameter is required. - :type required: bool - :param sensitive: Whether parameter is sensitive. - :type sensitive: bool - :param design_default_value: Design default value of parameter. - :type design_default_value: str - :param default_value: Default value of parameter. - :type default_value: str - :param sensitive_default_value: Default sensitive value of parameter. - :type sensitive_default_value: str - :param value_type: Parameter value type. - :type value_type: str - :param value_set: Parameter value set. - :type value_set: bool - :param variable: Parameter reference variable. - :type variable: str + :ivar id: Parameter id. + :vartype id: long + :ivar name: Parameter name. + :vartype name: str + :ivar description: Parameter description. + :vartype description: str + :ivar data_type: Parameter type. + :vartype data_type: str + :ivar required: Whether parameter is required. + :vartype required: bool + :ivar sensitive: Whether parameter is sensitive. + :vartype sensitive: bool + :ivar design_default_value: Design default value of parameter. + :vartype design_default_value: str + :ivar default_value: Default value of parameter. + :vartype default_value: str + :ivar sensitive_default_value: Default sensitive value of parameter. + :vartype sensitive_default_value: str + :ivar value_type: Parameter value type. + :vartype value_type: str + :ivar value_set: Parameter value set. 
+ :vartype value_set: bool + :ivar variable: Parameter reference variable. + :vartype variable: str """ _attribute_map = { @@ -40429,6 +54140,32 @@ def __init__( variable: Optional[str] = None, **kwargs ): + """ + :keyword id: Parameter id. + :paramtype id: long + :keyword name: Parameter name. + :paramtype name: str + :keyword description: Parameter description. + :paramtype description: str + :keyword data_type: Parameter type. + :paramtype data_type: str + :keyword required: Whether parameter is required. + :paramtype required: bool + :keyword sensitive: Whether parameter is sensitive. + :paramtype sensitive: bool + :keyword design_default_value: Design default value of parameter. + :paramtype design_default_value: str + :keyword default_value: Default value of parameter. + :paramtype default_value: str + :keyword sensitive_default_value: Default sensitive value of parameter. + :paramtype sensitive_default_value: str + :keyword value_type: Parameter value type. + :paramtype value_type: str + :keyword value_set: Parameter value set. + :paramtype value_set: bool + :keyword variable: Parameter reference variable. + :paramtype variable: str + """ super(SsisParameter, self).__init__(**kwargs) self.id = id self.name = name @@ -40449,23 +54186,23 @@ class SsisProject(SsisObjectMetadata): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of metadata.Constant filled by server. Possible values include: + :ivar type: Required. Type of metadata.Constant filled by server. Possible values include: "Folder", "Project", "Package", "Environment". - :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param folder_id: Folder id which contains project. - :type folder_id: long - :param version: Project version. 
- :type version: long - :param environment_refs: Environment reference in project. - :type environment_refs: list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] - :param parameters: Parameters in project. - :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + :vartype type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :ivar id: Metadata id. + :vartype id: long + :ivar name: Metadata name. + :vartype name: str + :ivar description: Metadata description. + :vartype description: str + :ivar folder_id: Folder id which contains project. + :vartype folder_id: long + :ivar version: Project version. + :vartype version: long + :ivar environment_refs: Environment reference in project. + :vartype environment_refs: list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :ivar parameters: Parameters in project. + :vartype parameters: list[~azure.mgmt.datafactory.models.SsisParameter] """ _validation = { @@ -40495,6 +54232,22 @@ def __init__( parameters: Optional[List["SsisParameter"]] = None, **kwargs ): + """ + :keyword id: Metadata id. + :paramtype id: long + :keyword name: Metadata name. + :paramtype name: str + :keyword description: Metadata description. + :paramtype description: str + :keyword folder_id: Folder id which contains project. + :paramtype folder_id: long + :keyword version: Project version. + :paramtype version: long + :keyword environment_refs: Environment reference in project. + :paramtype environment_refs: list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :keyword parameters: Parameters in project. + :paramtype parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ super(SsisProject, self).__init__(id=id, name=name, description=description, **kwargs) self.type = 'Project' # type: str self.folder_id = folder_id @@ -40508,12 +54261,12 @@ class SSISPropertyOverride(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. 
- :param value: Required. SSIS package property override value. Type: string (or Expression with + :ivar value: Required. SSIS package property override value. Type: string (or Expression with resultType string). - :type value: any - :param is_sensitive: Whether SSIS package property override value is sensitive data. Value will + :vartype value: any + :ivar is_sensitive: Whether SSIS package property override value is sensitive data. Value will be encrypted in SSISDB if it is true. - :type is_sensitive: bool + :vartype is_sensitive: bool """ _validation = { @@ -40532,6 +54285,14 @@ def __init__( is_sensitive: Optional[bool] = None, **kwargs ): + """ + :keyword value: Required. SSIS package property override value. Type: string (or Expression + with resultType string). + :paramtype value: any + :keyword is_sensitive: Whether SSIS package property override value is sensitive data. Value + will be encrypted in SSISDB if it is true. + :paramtype is_sensitive: bool + """ super(SSISPropertyOverride, self).__init__(**kwargs) self.value = value self.is_sensitive = is_sensitive @@ -40540,20 +54301,20 @@ def __init__( class SsisVariable(msrest.serialization.Model): """Ssis variable. - :param id: Variable id. - :type id: long - :param name: Variable name. - :type name: str - :param description: Variable description. - :type description: str - :param data_type: Variable type. - :type data_type: str - :param sensitive: Whether variable is sensitive. - :type sensitive: bool - :param value: Variable value. - :type value: str - :param sensitive_value: Variable sensitive value. - :type sensitive_value: str + :ivar id: Variable id. + :vartype id: long + :ivar name: Variable name. + :vartype name: str + :ivar description: Variable description. + :vartype description: str + :ivar data_type: Variable type. + :vartype data_type: str + :ivar sensitive: Whether variable is sensitive. + :vartype sensitive: bool + :ivar value: Variable value. 
+ :vartype value: str + :ivar sensitive_value: Variable sensitive value. + :vartype sensitive_value: str """ _attribute_map = { @@ -40578,6 +54339,22 @@ def __init__( sensitive_value: Optional[str] = None, **kwargs ): + """ + :keyword id: Variable id. + :paramtype id: long + :keyword name: Variable name. + :paramtype name: str + :keyword description: Variable description. + :paramtype description: str + :keyword data_type: Variable type. + :paramtype data_type: str + :keyword sensitive: Whether variable is sensitive. + :paramtype sensitive: bool + :keyword value: Variable value. + :paramtype value: str + :keyword sensitive_value: Variable sensitive value. + :paramtype sensitive_value: str + """ super(SsisVariable, self).__init__(**kwargs) self.id = id self.name = name @@ -40593,17 +54370,17 @@ class StagingSettings(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param linked_service_name: Required. Staging linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing the interim data. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar linked_service_name: Required. Staging linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar path: The path to storage for storing the interim data. Type: string (or Expression with resultType string). - :type path: any - :param enable_compression: Specifies whether to use compression when copying data via an - interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). 
- :type enable_compression: any + :vartype path: any + :ivar enable_compression: Specifies whether to use compression when copying data via an interim + staging. Default value is false. Type: boolean (or Expression with resultType boolean). + :vartype enable_compression: any """ _validation = { @@ -40626,6 +54403,19 @@ def __init__( enable_compression: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword linked_service_name: Required. Staging linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword path: The path to storage for storing the interim data. Type: string (or Expression + with resultType string). + :paramtype path: any + :keyword enable_compression: Specifies whether to use compression when copying data via an + interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). + :paramtype enable_compression: any + """ super(StagingSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.linked_service_name = linked_service_name @@ -40636,12 +54426,12 @@ def __init__( class StoredProcedureParameter(msrest.serialization.Model): """SQL stored procedure parameter. - :param value: Stored procedure parameter value. Type: string (or Expression with resultType + :ivar value: Stored procedure parameter value. Type: string (or Expression with resultType string). - :type value: any - :param type: Stored procedure parameter type. Possible values include: "String", "Int", - "Int64", "Decimal", "Guid", "Boolean", "Date". - :type type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType + :vartype value: any + :ivar type: Stored procedure parameter type. Possible values include: "String", "Int", "Int64", + "Decimal", "Guid", "Boolean", "Date". 
+ :vartype type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType """ _attribute_map = { @@ -40656,6 +54446,14 @@ def __init__( type: Optional[Union[str, "StoredProcedureParameterType"]] = None, **kwargs ): + """ + :keyword value: Stored procedure parameter value. Type: string (or Expression with resultType + string). + :paramtype value: any + :keyword type: Stored procedure parameter type. Possible values include: "String", "Int", + "Int64", "Decimal", "Guid", "Boolean", "Date". + :paramtype type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType + """ super(StoredProcedureParameter, self).__init__(**kwargs) self.value = value self.type = type @@ -40666,29 +54464,29 @@ class SwitchActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param on: Required. An expression that would evaluate to a string or integer. This is used to + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. 
+ :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar on: Required. An expression that would evaluate to a string or integer. This is used to determine the block of activities in cases that will be executed. - :type on: ~azure.mgmt.datafactory.models.Expression - :param cases: List of cases that correspond to expected values of the 'on' property. This is an + :vartype on: ~azure.mgmt.datafactory.models.Expression + :ivar cases: List of cases that correspond to expected values of the 'on' property. This is an optional property and if not provided, the activity will execute activities provided in defaultActivities. - :type cases: list[~azure.mgmt.datafactory.models.SwitchCase] - :param default_activities: List of activities to execute if no case condition is satisfied. - This is an optional property and if not provided, the activity will exit without any action. - :type default_activities: list[~azure.mgmt.datafactory.models.Activity] + :vartype cases: list[~azure.mgmt.datafactory.models.SwitchCase] + :ivar default_activities: List of activities to execute if no case condition is satisfied. This + is an optional property and if not provided, the activity will exit without any action. + :vartype default_activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { @@ -40722,6 +54520,29 @@ def __init__( default_activities: Optional[List["Activity"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. 
+ :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword on: Required. An expression that would evaluate to a string or integer. This is used + to determine the block of activities in cases that will be executed. + :paramtype on: ~azure.mgmt.datafactory.models.Expression + :keyword cases: List of cases that correspond to expected values of the 'on' property. This is + an optional property and if not provided, the activity will execute activities provided in + defaultActivities. + :paramtype cases: list[~azure.mgmt.datafactory.models.SwitchCase] + :keyword default_activities: List of activities to execute if no case condition is satisfied. + This is an optional property and if not provided, the activity will exit without any action. + :paramtype default_activities: list[~azure.mgmt.datafactory.models.Activity] + """ super(SwitchActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.type = 'Switch' # type: str self.on = on @@ -40732,10 +54553,10 @@ def __init__( class SwitchCase(msrest.serialization.Model): """Switch cases with have a value and corresponding activities. - :param value: Expected value that satisfies the expression result of the 'on' property. - :type value: str - :param activities: List of activities to execute for satisfied case condition. - :type activities: list[~azure.mgmt.datafactory.models.Activity] + :ivar value: Expected value that satisfies the expression result of the 'on' property. + :vartype value: str + :ivar activities: List of activities to execute for satisfied case condition. 
+ :vartype activities: list[~azure.mgmt.datafactory.models.Activity] """ _attribute_map = { @@ -40750,6 +54571,12 @@ def __init__( activities: Optional[List["Activity"]] = None, **kwargs ): + """ + :keyword value: Expected value that satisfies the expression result of the 'on' property. + :paramtype value: str + :keyword activities: List of activities to execute for satisfied case condition. + :paramtype activities: list[~azure.mgmt.datafactory.models.Activity] + """ super(SwitchCase, self).__init__(**kwargs) self.value = value self.activities = activities @@ -40760,39 +54587,39 @@ class SybaseLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param server: Required. Server name for connection. Type: string (or Expression with - resultType string). - :type server: any - :param database: Required. Database name for connection. Type: string (or Expression with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. 
+ :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar server: Required. Server name for connection. Type: string (or Expression with resultType + string). + :vartype server: any + :ivar database: Required. Database name for connection. Type: string (or Expression with resultType string). - :type database: any - :param schema: Schema name for connection. Type: string (or Expression with resultType string). - :type schema: any - :param authentication_type: AuthenticationType to be used for connection. Possible values + :vartype database: any + :ivar schema: Schema name for connection. Type: string (or Expression with resultType string). + :vartype schema: any + :ivar authentication_type: AuthenticationType to be used for connection. Possible values include: "Basic", "Windows". - :type authentication_type: str or ~azure.mgmt.datafactory.models.SybaseAuthenticationType - :param username: Username for authentication. Type: string (or Expression with resultType + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.SybaseAuthenticationType + :ivar username: Username for authentication. Type: string (or Expression with resultType string). - :type username: any - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype username: any + :ivar password: Password for authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -40834,6 +54661,40 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword server: Required. Server name for connection. Type: string (or Expression with + resultType string). + :paramtype server: any + :keyword database: Required. Database name for connection. Type: string (or Expression with + resultType string). + :paramtype database: any + :keyword schema: Schema name for connection. Type: string (or Expression with resultType + string). + :paramtype schema: any + :keyword authentication_type: AuthenticationType to be used for connection. Possible values + include: "Basic", "Windows". + :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.SybaseAuthenticationType + :keyword username: Username for authentication. Type: string (or Expression with resultType + string). + :paramtype username: any + :keyword password: Password for authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. 
Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(SybaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Sybase' # type: str self.server = server @@ -40850,31 +54711,31 @@ class SybaseSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Database query. Type: string (or Expression with resultType string). - :type query: any + :vartype additional_columns: any + :ivar query: Database query. Type: string (or Expression with resultType string). + :vartype query: any """ _validation = { @@ -40906,6 +54767,31 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). 
+ :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: any + """ super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SybaseSource' # type: str self.query = query @@ -40916,30 +54802,30 @@ class SybaseTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. 
+ :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The Sybase table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The Sybase table name. Type: string (or Expression with resultType string). 
+ :vartype table_name: any """ _validation = { @@ -40974,6 +54860,31 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The Sybase table name. Type: string (or Expression with resultType + string). + :paramtype table_name: any + """ super(SybaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'SybaseTable' # type: str self.table_name = table_name @@ -40984,37 +54895,37 @@ class TabularTranslator(CopyTranslator): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy translator type.Constant filled by server. - :type type: str - :param column_mappings: Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy translator type.Constant filled by server. + :vartype type: str + :ivar column_mappings: Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: MyName" Type: string (or Expression with resultType string). This property will be retired. Please use mappings property. - :type column_mappings: any - :param schema_mapping: The schema mapping to map between tabular data and hierarchical data. + :vartype column_mappings: any + :ivar schema_mapping: The schema mapping to map between tabular data and hierarchical data. Example: {"Column1": "$.Column1", "Column2": "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or Expression with resultType object). This property will be retired. Please use mappings property. - :type schema_mapping: any - :param collection_reference: The JSON Path of the Nested Array that is going to do cross-apply. + :vartype schema_mapping: any + :ivar collection_reference: The JSON Path of the Nested Array that is going to do cross-apply. Type: object (or Expression with resultType object). - :type collection_reference: any - :param map_complex_values_to_string: Whether to map complex (array and object) values to simple + :vartype collection_reference: any + :ivar map_complex_values_to_string: Whether to map complex (array and object) values to simple strings in json format. Type: boolean (or Expression with resultType boolean). 
- :type map_complex_values_to_string: any - :param mappings: Column mappings with logical types. Tabular->tabular example: + :vartype map_complex_values_to_string: any + :ivar mappings: Column mappings with logical types. Tabular->tabular example: [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Hierarchical->tabular example: [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Type: object (or Expression with resultType object). - :type mappings: any - :param type_conversion: Whether to enable the advanced type conversion feature in the Copy + :vartype mappings: any + :ivar type_conversion: Whether to enable the advanced type conversion feature in the Copy activity. Type: boolean (or Expression with resultType boolean). - :type type_conversion: any - :param type_conversion_settings: Type conversion settings. - :type type_conversion_settings: ~azure.mgmt.datafactory.models.TypeConversionSettings + :vartype type_conversion: any + :ivar type_conversion_settings: Type conversion settings. + :vartype type_conversion_settings: ~azure.mgmt.datafactory.models.TypeConversionSettings """ _validation = { @@ -41046,6 +54957,37 @@ def __init__( type_conversion_settings: Optional["TypeConversionSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword column_mappings: Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: + MyName" Type: string (or Expression with resultType string). This property will be retired. + Please use mappings property. 
+ :paramtype column_mappings: any + :keyword schema_mapping: The schema mapping to map between tabular data and hierarchical data. + Example: {"Column1": "$.Column1", "Column2": "$.Column2.Property1", "Column3": + "$.Column2.Property2"}. Type: object (or Expression with resultType object). This property will + be retired. Please use mappings property. + :paramtype schema_mapping: any + :keyword collection_reference: The JSON Path of the Nested Array that is going to do + cross-apply. Type: object (or Expression with resultType object). + :paramtype collection_reference: any + :keyword map_complex_values_to_string: Whether to map complex (array and object) values to + simple strings in json format. Type: boolean (or Expression with resultType boolean). + :paramtype map_complex_values_to_string: any + :keyword mappings: Column mappings with logical types. Tabular->tabular example: + [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + Hierarchical->tabular example: + [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + Type: object (or Expression with resultType object). + :paramtype mappings: any + :keyword type_conversion: Whether to enable the advanced type conversion feature in the Copy + activity. Type: boolean (or Expression with resultType boolean). + :paramtype type_conversion: any + :keyword type_conversion_settings: Type conversion settings. 
+ :paramtype type_conversion_settings: ~azure.mgmt.datafactory.models.TypeConversionSettings + """ super(TabularTranslator, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'TabularTranslator' # type: str self.column_mappings = column_mappings @@ -41062,14 +55004,14 @@ class TarGZipReadSettings(CompressionReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The Compression setting type.Constant filled by server. - :type type: str - :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + :vartype additional_properties: dict[str, any] + :ivar type: Required. The Compression setting type.Constant filled by server. + :vartype type: str + :ivar preserve_compression_file_name_as_folder: Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). - :type preserve_compression_file_name_as_folder: any + :vartype preserve_compression_file_name_as_folder: any """ _validation = { @@ -41089,6 +55031,14 @@ def __init__( preserve_compression_file_name_as_folder: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). 
+ :paramtype preserve_compression_file_name_as_folder: any + """ super(TarGZipReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'TarGZipReadSettings' # type: str self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder @@ -41099,14 +55049,14 @@ class TarReadSettings(CompressionReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The Compression setting type.Constant filled by server. - :type type: str - :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + :vartype additional_properties: dict[str, any] + :ivar type: Required. The Compression setting type.Constant filled by server. + :vartype type: str + :ivar preserve_compression_file_name_as_folder: Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). - :type preserve_compression_file_name_as_folder: any + :vartype preserve_compression_file_name_as_folder: any """ _validation = { @@ -41126,6 +55076,14 @@ def __init__( preserve_compression_file_name_as_folder: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). 
+ :paramtype preserve_compression_file_name_as_folder: any + """ super(TarReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'TarReadSettings' # type: str self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder @@ -41136,36 +55094,36 @@ class TeradataLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: Teradata ODBC connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. 
+ :vartype annotations: list[any] + :ivar connection_string: Teradata ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param server: Server name for connection. Type: string (or Expression with resultType string). - :type server: any - :param authentication_type: AuthenticationType to be used for connection. Possible values + :vartype connection_string: any + :ivar server: Server name for connection. Type: string (or Expression with resultType string). + :vartype server: any + :ivar authentication_type: AuthenticationType to be used for connection. Possible values include: "Basic", "Windows". - :type authentication_type: str or ~azure.mgmt.datafactory.models.TeradataAuthenticationType - :param username: Username for authentication. Type: string (or Expression with resultType + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.TeradataAuthenticationType + :ivar username: Username for authentication. Type: string (or Expression with resultType string). - :type username: any - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype username: any + :ivar password: Password for authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -41203,6 +55161,38 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: Teradata ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword server: Server name for connection. Type: string (or Expression with resultType + string). + :paramtype server: any + :keyword authentication_type: AuthenticationType to be used for connection. Possible values + include: "Basic", "Windows". + :paramtype authentication_type: str or + ~azure.mgmt.datafactory.models.TeradataAuthenticationType + :keyword username: Username for authentication. Type: string (or Expression with resultType + string). + :paramtype username: any + :keyword password: Password for authentication. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). 
+ :paramtype encrypted_credential: any + """ super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Teradata' # type: str self.connection_string = connection_string @@ -41216,17 +55206,17 @@ def __init__( class TeradataPartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for teradata source partitioning. - :param partition_column_name: The name of the column that will be used for proceeding range or + :ivar partition_column_name: The name of the column that will be used for proceeding range or hash partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: any - :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + :vartype partition_column_name: any + :ivar partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_upper_bound: any - :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + :vartype partition_upper_bound: any + :ivar partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_lower_bound: any + :vartype partition_lower_bound: any """ _attribute_map = { @@ -41243,6 +55233,19 @@ def __init__( partition_lower_bound: Optional[Any] = None, **kwargs ): + """ + :keyword partition_column_name: The name of the column that will be used for proceeding range + or hash partitioning. Type: string (or Expression with resultType string). 
+ :paramtype partition_column_name: any + :keyword partition_upper_bound: The maximum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). + :paramtype partition_upper_bound: any + :keyword partition_lower_bound: The minimum value of column specified in partitionColumnName + that will be used for proceeding range partitioning. Type: string (or Expression with + resultType string). + :paramtype partition_lower_bound: any + """ super(TeradataPartitionSettings, self).__init__(**kwargs) self.partition_column_name = partition_column_name self.partition_upper_bound = partition_upper_bound @@ -41254,37 +55257,36 @@ class TeradataSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: Teradata query. Type: string (or Expression with resultType string). - :type query: any - :param partition_option: The partition mechanism that will be used for teradata read in + :vartype additional_columns: any + :ivar query: Teradata query. Type: string (or Expression with resultType string). + :vartype query: any + :ivar partition_option: The partition mechanism that will be used for teradata read in parallel. Possible values include: "None", "Hash", "DynamicRange". - :type partition_option: any - :param partition_settings: The settings that will be leveraged for teradata source - partitioning. 
- :type partition_settings: ~azure.mgmt.datafactory.models.TeradataPartitionSettings + :vartype partition_option: any + :ivar partition_settings: The settings that will be leveraged for teradata source partitioning. + :vartype partition_settings: ~azure.mgmt.datafactory.models.TeradataPartitionSettings """ _validation = { @@ -41320,6 +55322,37 @@ def __init__( partition_settings: Optional["TeradataPartitionSettings"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: Teradata query. Type: string (or Expression with resultType string). + :paramtype query: any + :keyword partition_option: The partition mechanism that will be used for teradata read in + parallel. 
Possible values include: "None", "Hash", "DynamicRange". + :paramtype partition_option: any + :keyword partition_settings: The settings that will be leveraged for teradata source + partitioning. + :paramtype partition_settings: ~azure.mgmt.datafactory.models.TeradataPartitionSettings + """ super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'TeradataSource' # type: str self.query = query @@ -41332,33 +55365,33 @@ class TeradataTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param database: The database name of Teradata. Type: string (or Expression with resultType + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar database: The database name of Teradata. Type: string (or Expression with resultType string). - :type database: any - :param table: The table name of Teradata. Type: string (or Expression with resultType string). - :type table: any + :vartype database: any + :ivar table: The table name of Teradata. Type: string (or Expression with resultType string). + :vartype table: any """ _validation = { @@ -41395,6 +55428,34 @@ def __init__( table: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. 
+ :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword database: The database name of Teradata. Type: string (or Expression with resultType + string). + :paramtype database: any + :keyword table: The table name of Teradata. Type: string (or Expression with resultType + string). + :paramtype table: any + """ super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'TeradataTable' # type: str self.database = database @@ -41406,42 +55467,42 @@ class TextFormat(DatasetStorageFormat): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. 
Type of dataset storage format.Constant filled by server. - :type type: str - :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: any - :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: any - :param column_delimiter: The column delimiter. Type: string (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset storage format.Constant filled by server. + :vartype type: str + :ivar serializer: Serializer. Type: string (or Expression with resultType string). + :vartype serializer: any + :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). + :vartype deserializer: any + :ivar column_delimiter: The column delimiter. Type: string (or Expression with resultType string). - :type column_delimiter: any - :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string). - :type row_delimiter: any - :param escape_char: The escape character. Type: string (or Expression with resultType string). - :type escape_char: any - :param quote_char: The quote character. Type: string (or Expression with resultType string). - :type quote_char: any - :param null_value: The null value string. Type: string (or Expression with resultType string). - :type null_value: any - :param encoding_name: The code page name of the preferred encoding. If miss, the default value + :vartype column_delimiter: any + :ivar row_delimiter: The row delimiter. Type: string (or Expression with resultType string). + :vartype row_delimiter: any + :ivar escape_char: The escape character. Type: string (or Expression with resultType string). + :vartype escape_char: any + :ivar quote_char: The quote character. Type: string (or Expression with resultType string). + :vartype quote_char: any + :ivar null_value: The null value string. Type: string (or Expression with resultType string). 
+ :vartype null_value: any + :ivar encoding_name: The code page name of the preferred encoding. If miss, the default value is ΓÇ£utf-8ΓÇ¥, unless BOM denotes another Unicode encoding. Refer to the ΓÇ£NameΓÇ¥ column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :type encoding_name: any - :param treat_empty_as_null: Treat empty column values in the text file as null. The default + :vartype encoding_name: any + :ivar treat_empty_as_null: Treat empty column values in the text file as null. The default value is true. Type: boolean (or Expression with resultType boolean). - :type treat_empty_as_null: any - :param skip_line_count: The number of lines/rows to be skipped when parsing text files. The + :vartype treat_empty_as_null: any + :ivar skip_line_count: The number of lines/rows to be skipped when parsing text files. The default value is 0. Type: integer (or Expression with resultType integer). - :type skip_line_count: any - :param first_row_as_header: When used as input, treat the first row of data as headers. When + :vartype skip_line_count: any + :ivar first_row_as_header: When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). - :type first_row_as_header: any + :vartype first_row_as_header: any """ _validation = { @@ -41481,6 +55542,44 @@ def __init__( first_row_as_header: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword serializer: Serializer. Type: string (or Expression with resultType string). + :paramtype serializer: any + :keyword deserializer: Deserializer. 
Type: string (or Expression with resultType string). + :paramtype deserializer: any + :keyword column_delimiter: The column delimiter. Type: string (or Expression with resultType + string). + :paramtype column_delimiter: any + :keyword row_delimiter: The row delimiter. Type: string (or Expression with resultType string). + :paramtype row_delimiter: any + :keyword escape_char: The escape character. Type: string (or Expression with resultType + string). + :paramtype escape_char: any + :keyword quote_char: The quote character. Type: string (or Expression with resultType string). + :paramtype quote_char: any + :keyword null_value: The null value string. Type: string (or Expression with resultType + string). + :paramtype null_value: any + :keyword encoding_name: The code page name of the preferred encoding. If miss, the default + value is ΓÇ£utf-8ΓÇ¥, unless BOM denotes another Unicode encoding. Refer to the ΓÇ£NameΓÇ¥ + column of the table in the following link to set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). + :paramtype encoding_name: any + :keyword treat_empty_as_null: Treat empty column values in the text file as null. The default + value is true. Type: boolean (or Expression with resultType boolean). + :paramtype treat_empty_as_null: any + :keyword skip_line_count: The number of lines/rows to be skipped when parsing text files. The + default value is 0. Type: integer (or Expression with resultType integer). + :paramtype skip_line_count: any + :keyword first_row_as_header: When used as input, treat the first row of data as headers. When + used as output,write the headers into the output as the first row of data. The default value is + false. Type: boolean (or Expression with resultType boolean). 
+ :paramtype first_row_as_header: any + """ super(TextFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) self.type = 'TextFormat' # type: str self.column_delimiter = column_delimiter @@ -41502,10 +55601,10 @@ class TriggerDependencyReference(DependencyReference): All required parameters must be populated in order to send to Azure. - :param type: Required. The type of dependency reference.Constant filled by server. - :type type: str - :param reference_trigger: Required. Referenced trigger. - :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + :ivar type: Required. The type of dependency reference.Constant filled by server. + :vartype type: str + :ivar reference_trigger: Required. Referenced trigger. + :vartype reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference """ _validation = { @@ -41528,6 +55627,10 @@ def __init__( reference_trigger: "TriggerReference", **kwargs ): + """ + :keyword reference_trigger: Required. Referenced trigger. + :paramtype reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + """ super(TriggerDependencyReference, self).__init__(**kwargs) self.type = 'TriggerDependencyReference' # type: str self.reference_trigger = reference_trigger @@ -41536,12 +55639,12 @@ def __init__( class TriggerFilterParameters(msrest.serialization.Model): """Query parameters for triggers. - :param continuation_token: The continuation token for getting the next page of results. Null - for first page. - :type continuation_token: str - :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun + :ivar continuation_token: The continuation token for getting the next page of results. Null for + first page. + :vartype continuation_token: str + :ivar parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun triggers. 
- :type parent_trigger_name: str + :vartype parent_trigger_name: str """ _attribute_map = { @@ -41556,6 +55659,14 @@ def __init__( parent_trigger_name: Optional[str] = None, **kwargs ): + """ + :keyword continuation_token: The continuation token for getting the next page of results. Null + for first page. + :paramtype continuation_token: str + :keyword parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child + rerun triggers. + :paramtype parent_trigger_name: str + """ super(TriggerFilterParameters, self).__init__(**kwargs) self.continuation_token = continuation_token self.parent_trigger_name = parent_trigger_name @@ -41566,10 +55677,10 @@ class TriggerListResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of triggers. - :type value: list[~azure.mgmt.datafactory.models.TriggerResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :ivar value: Required. List of triggers. + :vartype value: list[~azure.mgmt.datafactory.models.TriggerResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str """ _validation = { @@ -41588,6 +55699,12 @@ def __init__( next_link: Optional[str] = None, **kwargs ): + """ + :keyword value: Required. List of triggers. + :paramtype value: list[~azure.mgmt.datafactory.models.TriggerResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ super(TriggerListResponse, self).__init__(**kwargs) self.value = value self.next_link = next_link @@ -41596,10 +55713,10 @@ def __init__( class TriggerPipelineReference(msrest.serialization.Model): """Pipeline that needs to be triggered with the given parameters. - :param pipeline_reference: Pipeline reference. 
- :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference - :param parameters: Pipeline parameters. - :type parameters: dict[str, any] + :ivar pipeline_reference: Pipeline reference. + :vartype pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference + :ivar parameters: Pipeline parameters. + :vartype parameters: dict[str, any] """ _attribute_map = { @@ -41614,6 +55731,12 @@ def __init__( parameters: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword pipeline_reference: Pipeline reference. + :paramtype pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference + :keyword parameters: Pipeline parameters. + :paramtype parameters: dict[str, any] + """ super(TriggerPipelineReference, self).__init__(**kwargs) self.pipeline_reference = pipeline_reference self.parameters = parameters @@ -41624,11 +55747,11 @@ class TriggerQueryResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of triggers. - :type value: list[~azure.mgmt.datafactory.models.TriggerResource] - :param continuation_token: The continuation token for getting the next page of results, if any + :ivar value: Required. List of triggers. + :vartype value: list[~azure.mgmt.datafactory.models.TriggerResource] + :ivar continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. - :type continuation_token: str + :vartype continuation_token: str """ _validation = { @@ -41647,6 +55770,13 @@ def __init__( continuation_token: Optional[str] = None, **kwargs ): + """ + :keyword value: Required. List of triggers. + :paramtype value: list[~azure.mgmt.datafactory.models.TriggerResource] + :keyword continuation_token: The continuation token for getting the next page of results, if + any remaining results exist, null otherwise. 
+ :paramtype continuation_token: str + """ super(TriggerQueryResponse, self).__init__(**kwargs) self.value = value self.continuation_token = continuation_token @@ -41661,8 +55791,8 @@ class TriggerReference(msrest.serialization.Model): :ivar type: Trigger reference type. Has constant value: "TriggerReference". :vartype type: str - :param reference_name: Required. Reference trigger name. - :type reference_name: str + :ivar reference_name: Required. Reference trigger name. + :vartype reference_name: str """ _validation = { @@ -41683,6 +55813,10 @@ def __init__( reference_name: str, **kwargs ): + """ + :keyword reference_name: Required. Reference trigger name. + :paramtype reference_name: str + """ super(TriggerReference, self).__init__(**kwargs) self.reference_name = reference_name @@ -41702,8 +55836,8 @@ class TriggerResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Properties of the trigger. - :type properties: ~azure.mgmt.datafactory.models.Trigger + :ivar properties: Required. Properties of the trigger. + :vartype properties: ~azure.mgmt.datafactory.models.Trigger """ _validation = { @@ -41728,6 +55862,10 @@ def __init__( properties: "Trigger", **kwargs ): + """ + :keyword properties: Required. Properties of the trigger. + :paramtype properties: ~azure.mgmt.datafactory.models.Trigger + """ super(TriggerResource, self).__init__(**kwargs) self.properties = properties @@ -41737,9 +55875,9 @@ class TriggerRun(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] + :vartype additional_properties: dict[str, any] :ivar trigger_run_id: Trigger run id. 
:vartype trigger_run_id: str :ivar trigger_name: Trigger name. @@ -41796,6 +55934,11 @@ def __init__( additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + """ super(TriggerRun, self).__init__(**kwargs) self.additional_properties = additional_properties self.trigger_run_id = None @@ -41815,11 +55958,11 @@ class TriggerRunsQueryResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param value: Required. List of trigger runs. - :type value: list[~azure.mgmt.datafactory.models.TriggerRun] - :param continuation_token: The continuation token for getting the next page of results, if any + :ivar value: Required. List of trigger runs. + :vartype value: list[~azure.mgmt.datafactory.models.TriggerRun] + :ivar continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. - :type continuation_token: str + :vartype continuation_token: str """ _validation = { @@ -41838,6 +55981,13 @@ def __init__( continuation_token: Optional[str] = None, **kwargs ): + """ + :keyword value: Required. List of trigger runs. + :paramtype value: list[~azure.mgmt.datafactory.models.TriggerRun] + :keyword continuation_token: The continuation token for getting the next page of results, if + any remaining results exist, null otherwise. + :paramtype continuation_token: str + """ super(TriggerRunsQueryResponse, self).__init__(**kwargs) self.value = value self.continuation_token = continuation_token @@ -41869,6 +56019,8 @@ def __init__( self, **kwargs ): + """ + """ super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) self.trigger_name = None self.status = None @@ -41881,45 +56033,45 @@ class TumblingWindowTrigger(Trigger): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str + :vartype additional_properties: dict[str, any] + :ivar type: Required. Trigger type.Constant filled by server. + :vartype type: str + :ivar description: Trigger description. + :vartype description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[any] - :param pipeline: Required. Pipeline for which runs are created when an event is fired for + :ivar annotations: List of tags that can be used for describing the trigger. + :vartype annotations: list[any] + :ivar pipeline: Required. Pipeline for which runs are created when an event is fired for trigger window that is ready. - :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference - :param frequency: Required. The frequency of the time windows. Possible values include: + :vartype pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference + :ivar frequency: Required. The frequency of the time windows. Possible values include: "Minute", "Hour", "Month". - :type frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency - :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 + :vartype frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency + :ivar interval: Required. The interval of the time windows. 
The minimum interval allowed is 15 Minutes. - :type interval: int - :param start_time: Required. The start time for the time period for the trigger during which + :vartype interval: int + :ivar start_time: Required. The start time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is currently supported. - :type start_time: ~datetime.datetime - :param end_time: The end time for the time period for the trigger during which events are fired + :vartype start_time: ~datetime.datetime + :ivar end_time: The end time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is currently supported. - :type end_time: ~datetime.datetime - :param delay: Specifies how long the trigger waits past due time before triggering new run. It + :vartype end_time: ~datetime.datetime + :ivar delay: Specifies how long the trigger waits past due time before triggering new run. It doesn't alter window start and end time. The default is 0. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type delay: any - :param max_concurrency: Required. The max number of parallel time windows (ready for execution) + :vartype delay: any + :ivar max_concurrency: Required. The max number of parallel time windows (ready for execution) for which a new run is triggered. - :type max_concurrency: int - :param retry_policy: Retry policy that will be applied for failed pipeline runs. - :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy - :param depends_on: Triggers that this trigger depends on. Only tumbling window triggers are + :vartype max_concurrency: int + :ivar retry_policy: Retry policy that will be applied for failed pipeline runs. + :vartype retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy + :ivar depends_on: Triggers that this trigger depends on. Only tumbling window triggers are supported. 
- :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] + :vartype depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] """ _validation = { @@ -41966,6 +56118,42 @@ def __init__( depends_on: Optional[List["DependencyReference"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Trigger description. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the trigger. + :paramtype annotations: list[any] + :keyword pipeline: Required. Pipeline for which runs are created when an event is fired for + trigger window that is ready. + :paramtype pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference + :keyword frequency: Required. The frequency of the time windows. Possible values include: + "Minute", "Hour", "Month". + :paramtype frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency + :keyword interval: Required. The interval of the time windows. The minimum interval allowed is + 15 Minutes. + :paramtype interval: int + :keyword start_time: Required. The start time for the time period for the trigger during which + events are fired for windows that are ready. Only UTC time is currently supported. + :paramtype start_time: ~datetime.datetime + :keyword end_time: The end time for the time period for the trigger during which events are + fired for windows that are ready. Only UTC time is currently supported. + :paramtype end_time: ~datetime.datetime + :keyword delay: Specifies how long the trigger waits past due time before triggering new run. + It doesn't alter window start and end time. The default is 0. Type: string (or Expression with + resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype delay: any + :keyword max_concurrency: Required. 
The max number of parallel time windows (ready for + execution) for which a new run is triggered. + :paramtype max_concurrency: int + :keyword retry_policy: Retry policy that will be applied for failed pipeline runs. + :paramtype retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy + :keyword depends_on: Triggers that this trigger depends on. Only tumbling window triggers are + supported. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] + """ super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.type = 'TumblingWindowTrigger' # type: str self.pipeline = pipeline @@ -41984,16 +56172,16 @@ class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): All required parameters must be populated in order to send to Azure. - :param type: Required. The type of dependency reference.Constant filled by server. - :type type: str - :param reference_trigger: Required. Referenced trigger. - :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference - :param offset: Timespan applied to the start time of a tumbling window when evaluating + :ivar type: Required. The type of dependency reference.Constant filled by server. + :vartype type: str + :ivar reference_trigger: Required. Referenced trigger. + :vartype reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + :ivar offset: Timespan applied to the start time of a tumbling window when evaluating dependency. - :type offset: str - :param size: The size of the window when evaluating the dependency. If undefined the frequency + :vartype offset: str + :ivar size: The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used. - :type size: str + :vartype size: str """ _validation = { @@ -42018,6 +56206,16 @@ def __init__( size: Optional[str] = None, **kwargs ): + """ + :keyword reference_trigger: Required. 
Referenced trigger. + :paramtype reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + :keyword offset: Timespan applied to the start time of a tumbling window when evaluating + dependency. + :paramtype offset: str + :keyword size: The size of the window when evaluating the dependency. If undefined the + frequency of the tumbling window will be used. + :paramtype size: str + """ super(TumblingWindowTriggerDependencyReference, self).__init__(reference_trigger=reference_trigger, **kwargs) self.type = 'TumblingWindowTriggerDependencyReference' # type: str self.offset = offset @@ -42027,24 +56225,24 @@ def __init__( class TypeConversionSettings(msrest.serialization.Model): """Type conversion settings. - :param allow_data_truncation: Whether to allow data truncation when converting the data. Type: + :ivar allow_data_truncation: Whether to allow data truncation when converting the data. Type: boolean (or Expression with resultType boolean). - :type allow_data_truncation: any - :param treat_boolean_as_number: Whether to treat boolean values as numbers. Type: boolean (or + :vartype allow_data_truncation: any + :ivar treat_boolean_as_number: Whether to treat boolean values as numbers. Type: boolean (or Expression with resultType boolean). - :type treat_boolean_as_number: any - :param date_time_format: The format for DateTime values. Type: string (or Expression with + :vartype treat_boolean_as_number: any + :ivar date_time_format: The format for DateTime values. Type: string (or Expression with resultType string). - :type date_time_format: any - :param date_time_offset_format: The format for DateTimeOffset values. Type: string (or + :vartype date_time_format: any + :ivar date_time_offset_format: The format for DateTimeOffset values. Type: string (or Expression with resultType string). - :type date_time_offset_format: any - :param time_span_format: The format for TimeSpan values. 
Type: string (or Expression with + :vartype date_time_offset_format: any + :ivar time_span_format: The format for TimeSpan values. Type: string (or Expression with resultType string). - :type time_span_format: any - :param culture: The culture used to convert data from/to string. Type: string (or Expression + :vartype time_span_format: any + :ivar culture: The culture used to convert data from/to string. Type: string (or Expression with resultType string). - :type culture: any + :vartype culture: any """ _attribute_map = { @@ -42067,6 +56265,26 @@ def __init__( culture: Optional[Any] = None, **kwargs ): + """ + :keyword allow_data_truncation: Whether to allow data truncation when converting the data. + Type: boolean (or Expression with resultType boolean). + :paramtype allow_data_truncation: any + :keyword treat_boolean_as_number: Whether to treat boolean values as numbers. Type: boolean (or + Expression with resultType boolean). + :paramtype treat_boolean_as_number: any + :keyword date_time_format: The format for DateTime values. Type: string (or Expression with + resultType string). + :paramtype date_time_format: any + :keyword date_time_offset_format: The format for DateTimeOffset values. Type: string (or + Expression with resultType string). + :paramtype date_time_offset_format: any + :keyword time_span_format: The format for TimeSpan values. Type: string (or Expression with + resultType string). + :paramtype time_span_format: any + :keyword culture: The culture used to convert data from/to string. Type: string (or Expression + with resultType string). + :paramtype culture: any + """ super(TypeConversionSettings, self).__init__(**kwargs) self.allow_data_truncation = allow_data_truncation self.treat_boolean_as_number = treat_boolean_as_number @@ -42081,30 +56299,30 @@ class UntilActivity(ControlActivity): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param expression: Required. An expression that would evaluate to Boolean. The loop will + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar expression: Required. An expression that would evaluate to Boolean. The loop will continue until this expression evaluates to true. - :type expression: ~azure.mgmt.datafactory.models.Expression - :param timeout: Specifies the timeout for the activity to run. If there is no value specified, + :vartype expression: ~azure.mgmt.datafactory.models.Expression + :ivar timeout: Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: any - :param activities: Required. List of activities to execute. - :type activities: list[~azure.mgmt.datafactory.models.Activity] + :vartype timeout: any + :ivar activities: Required. List of activities to execute. + :vartype activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { @@ -42139,6 +56357,30 @@ def __init__( timeout: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword expression: Required. An expression that would evaluate to Boolean. The loop will + continue until this expression evaluates to true. + :paramtype expression: ~azure.mgmt.datafactory.models.Expression + :keyword timeout: Specifies the timeout for the activity to run. If there is no value + specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). Type: string (or Expression with + resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype timeout: any + :keyword activities: Required. List of activities to execute. 
+ :paramtype activities: list[~azure.mgmt.datafactory.models.Activity] + """ super(UntilActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.type = 'Until' # type: str self.expression = expression @@ -42149,9 +56391,9 @@ def __init__( class UpdateIntegrationRuntimeNodeRequest(msrest.serialization.Model): """Update integration runtime node request. - :param concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration + :ivar concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed. - :type concurrent_jobs_limit: int + :vartype concurrent_jobs_limit: int """ _validation = { @@ -42168,6 +56410,11 @@ def __init__( concurrent_jobs_limit: Optional[int] = None, **kwargs ): + """ + :keyword concurrent_jobs_limit: The number of concurrent jobs permitted to run on the + integration runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed. + :paramtype concurrent_jobs_limit: int + """ super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs) self.concurrent_jobs_limit = concurrent_jobs_limit @@ -42175,13 +56422,13 @@ def __init__( class UpdateIntegrationRuntimeRequest(msrest.serialization.Model): """Update integration runtime request. - :param auto_update: Enables or disables the auto-update feature of the self-hosted integration + :ivar auto_update: Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189. Possible values include: "On", "Off". - :type auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate - :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. 
The + :vartype auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate + :ivar update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time. - :type update_delay_offset: str + :vartype update_delay_offset: str """ _attribute_map = { @@ -42196,6 +56443,15 @@ def __init__( update_delay_offset: Optional[str] = None, **kwargs ): + """ + :keyword auto_update: Enables or disables the auto-update feature of the self-hosted + integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189. Possible values + include: "On", "Off". + :paramtype auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate + :keyword update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. + The integration runtime auto update will happen on that time. + :paramtype update_delay_offset: str + """ super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs) self.auto_update = auto_update self.update_delay_offset = update_delay_offset @@ -42204,20 +56460,20 @@ def __init__( class UserAccessPolicy(msrest.serialization.Model): """Get Data Plane read only token request definition. - :param permissions: The string with permissions for Data Plane access. Currently only 'r' is + :ivar permissions: The string with permissions for Data Plane access. Currently only 'r' is supported which grants read only access. - :type permissions: str - :param access_resource_path: The resource path to get access relative to factory. Currently - only empty string is supported which corresponds to the factory resource. - :type access_resource_path: str - :param profile_name: The name of the profile. Currently only the default is supported. The + :vartype permissions: str + :ivar access_resource_path: The resource path to get access relative to factory. Currently only + empty string is supported which corresponds to the factory resource. 
+ :vartype access_resource_path: str + :ivar profile_name: The name of the profile. Currently only the default is supported. The default value is DefaultProfile. - :type profile_name: str - :param start_time: Start time for the token. If not specified the current time will be used. - :type start_time: str - :param expire_time: Expiration time for the token. Maximum duration for the token is eight - hours and by default the token will expire in eight hours. - :type expire_time: str + :vartype profile_name: str + :ivar start_time: Start time for the token. If not specified the current time will be used. + :vartype start_time: str + :ivar expire_time: Expiration time for the token. Maximum duration for the token is eight hours + and by default the token will expire in eight hours. + :vartype expire_time: str """ _attribute_map = { @@ -42238,6 +56494,22 @@ def __init__( expire_time: Optional[str] = None, **kwargs ): + """ + :keyword permissions: The string with permissions for Data Plane access. Currently only 'r' is + supported which grants read only access. + :paramtype permissions: str + :keyword access_resource_path: The resource path to get access relative to factory. Currently + only empty string is supported which corresponds to the factory resource. + :paramtype access_resource_path: str + :keyword profile_name: The name of the profile. Currently only the default is supported. The + default value is DefaultProfile. + :paramtype profile_name: str + :keyword start_time: Start time for the token. If not specified the current time will be used. + :paramtype start_time: str + :keyword expire_time: Expiration time for the token. Maximum duration for the token is eight + hours and by default the token will expire in eight hours. 
+ :paramtype expire_time: str + """ super(UserAccessPolicy, self).__init__(**kwargs) self.permissions = permissions self.access_resource_path = access_resource_path @@ -42251,11 +56523,11 @@ class UserProperty(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param name: Required. User property name. - :type name: str - :param value: Required. User property value. Type: string (or Expression with resultType + :ivar name: Required. User property name. + :vartype name: str + :ivar value: Required. User property value. Type: string (or Expression with resultType string). - :type value: any + :vartype value: any """ _validation = { @@ -42275,6 +56547,13 @@ def __init__( value: Any, **kwargs ): + """ + :keyword name: Required. User property name. + :paramtype name: str + :keyword value: Required. User property value. Type: string (or Expression with resultType + string). + :paramtype value: any + """ super(UserProperty, self).__init__(**kwargs) self.name = name self.value = value @@ -42285,36 +56564,36 @@ class ValidationActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param timeout: Specifies the timeout for the activity to run. 
If there is no value specified, + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar timeout: Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: any - :param sleep: A delay in seconds between validation attempts. If no value is specified, 10 + :vartype timeout: any + :ivar sleep: A delay in seconds between validation attempts. If no value is specified, 10 seconds will be used as the default. Type: integer (or Expression with resultType integer). - :type sleep: any - :param minimum_size: Can be used if dataset points to a file. The file must be greater than or + :vartype sleep: any + :ivar minimum_size: Can be used if dataset points to a file. The file must be greater than or equal in size to the value specified. Type: integer (or Expression with resultType integer). - :type minimum_size: any - :param child_items: Can be used if dataset points to a folder. If set to true, the folder must + :vartype minimum_size: any + :ivar child_items: Can be used if dataset points to a folder. If set to true, the folder must have at least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). - :type child_items: any - :param dataset: Required. Validation activity dataset reference. 
- :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + :vartype child_items: any + :ivar dataset: Required. Validation activity dataset reference. + :vartype dataset: ~azure.mgmt.datafactory.models.DatasetReference """ _validation = { @@ -42352,6 +56631,36 @@ def __init__( child_items: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword timeout: Specifies the timeout for the activity to run. If there is no value + specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype timeout: any + :keyword sleep: A delay in seconds between validation attempts. If no value is specified, 10 + seconds will be used as the default. Type: integer (or Expression with resultType integer). + :paramtype sleep: any + :keyword minimum_size: Can be used if dataset points to a file. The file must be greater than + or equal in size to the value specified. Type: integer (or Expression with resultType integer). + :paramtype minimum_size: any + :keyword child_items: Can be used if dataset points to a folder. If set to true, the folder + must have at least one file. If set to false, the folder must be empty. Type: boolean (or + Expression with resultType boolean). + :paramtype child_items: any + :keyword dataset: Required. Validation activity dataset reference. 
+ :paramtype dataset: ~azure.mgmt.datafactory.models.DatasetReference + """ super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.type = 'Validation' # type: str self.timeout = timeout @@ -42366,10 +56675,10 @@ class VariableSpecification(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Variable type. Possible values include: "String", "Bool", "Array". - :type type: str or ~azure.mgmt.datafactory.models.VariableType - :param default_value: Default value of variable. - :type default_value: any + :ivar type: Required. Variable type. Possible values include: "String", "Bool", "Array". + :vartype type: str or ~azure.mgmt.datafactory.models.VariableType + :ivar default_value: Default value of variable. + :vartype default_value: any """ _validation = { @@ -42388,6 +56697,12 @@ def __init__( default_value: Optional[Any] = None, **kwargs ): + """ + :keyword type: Required. Variable type. Possible values include: "String", "Bool", "Array". + :paramtype type: str or ~azure.mgmt.datafactory.models.VariableType + :keyword default_value: Default value of variable. + :paramtype default_value: any + """ super(VariableSpecification, self).__init__(**kwargs) self.type = type self.default_value = default_value @@ -42398,28 +56713,28 @@ class VerticaLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_string: An ODBC connection string. Type: string, SecureString or + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: any - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype connection_string: any + :ivar pwd: The Azure key vault secret reference of password in connection string. + :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -42451,6 +56766,28 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :paramtype connection_string: any + :keyword pwd: The Azure key vault secret reference of password in connection string. + :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(VerticaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Vertica' # type: str self.connection_string = connection_string @@ -42463,32 +56800,32 @@ class VerticaSource(TabularSource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -42520,6 +56857,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'VerticaSource' # type: str self.query = query @@ -42530,37 +56893,37 @@ class VerticaTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: any - :param table: The table name of the Vertica. Type: string (or Expression with resultType + :vartype table_name: any + :ivar table: The table name of the Vertica. Type: string (or Expression with resultType string). - :type table: any - :param schema_type_properties_schema: The schema name of the Vertica. Type: string (or + :vartype table: any + :ivar schema_type_properties_schema: The schema name of the Vertica. Type: string (or Expression with resultType string). 
- :type schema_type_properties_schema: any + :vartype schema_type_properties_schema: any """ _validation = { @@ -42599,6 +56962,37 @@ def __init__( schema_type_properties_schema: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: This property will be retired. Please consider using schema + table + properties instead. + :paramtype table_name: any + :keyword table: The table name of the Vertica. Type: string (or Expression with resultType + string). + :paramtype table: any + :keyword schema_type_properties_schema: The schema name of the Vertica. Type: string (or + Expression with resultType string). 
+ :paramtype schema_type_properties_schema: any + """ super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'VerticaTable' # type: str self.table_name = table_name @@ -42611,21 +57005,21 @@ class WaitActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param wait_time_in_seconds: Required. Duration in seconds. - :type wait_time_in_seconds: any + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar wait_time_in_seconds: Required. Duration in seconds. 
+ :vartype wait_time_in_seconds: any """ _validation = { @@ -42655,6 +57049,21 @@ def __init__( user_properties: Optional[List["UserProperty"]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword wait_time_in_seconds: Required. Duration in seconds. + :paramtype wait_time_in_seconds: any + """ super(WaitActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.type = 'Wait' # type: str self.wait_time_in_seconds = wait_time_in_seconds @@ -42665,44 +57074,44 @@ class WebActivity(ExecutionActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param linked_service_name: Linked service reference. 
- :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param method: Required. Rest API method for target endpoint. Possible values include: "GET", + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. + :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar linked_service_name: Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :ivar method: Required. Rest API method for target endpoint. Possible values include: "GET", "POST", "PUT", "DELETE". - :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod - :param url: Required. Web activity target endpoint and path. Type: string (or Expression with + :vartype method: str or ~azure.mgmt.datafactory.models.WebActivityMethod + :ivar url: Required. Web activity target endpoint and path. Type: string (or Expression with resultType string). - :type url: any - :param headers: Represents the headers that will be sent to the request. For example, to set - the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": + :vartype url: any + :ivar headers: Represents the headers that will be sent to the request. 
For example, to set the + language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :type headers: any - :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT + :vartype headers: any + :ivar body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). - :type body: any - :param authentication: Authentication method used for calling the endpoint. - :type authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication - :param datasets: List of datasets passed to web endpoint. - :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] - :param linked_services: List of linked services passed to web endpoint. - :type linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :vartype body: any + :ivar authentication: Authentication method used for calling the endpoint. + :vartype authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication + :ivar datasets: List of datasets passed to web endpoint. + :vartype datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + :ivar linked_services: List of linked services passed to web endpoint. + :vartype linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference """ _validation = { @@ -42751,6 +57160,44 @@ def __init__( connect_via: Optional["IntegrationRuntimeReference"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword linked_service_name: Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :keyword method: Required. Rest API method for target endpoint. Possible values include: "GET", + "POST", "PUT", "DELETE". + :paramtype method: str or ~azure.mgmt.datafactory.models.WebActivityMethod + :keyword url: Required. Web activity target endpoint and path. Type: string (or Expression with + resultType string). + :paramtype url: any + :keyword headers: Represents the headers that will be sent to the request. For example, to set + the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": + "application/json" }. Type: string (or Expression with resultType string). + :paramtype headers: any + :keyword body: Represents the payload that will be sent to the endpoint. Required for POST/PUT + method, not allowed for GET method Type: string (or Expression with resultType string). + :paramtype body: any + :keyword authentication: Authentication method used for calling the endpoint. + :paramtype authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication + :keyword datasets: List of datasets passed to web endpoint. + :paramtype datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + :keyword linked_services: List of linked services passed to web endpoint. 
+ :paramtype linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + """ super(WebActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'WebActivity' # type: str self.method = method @@ -42766,25 +57213,24 @@ def __init__( class WebActivityAuthentication(msrest.serialization.Model): """Web activity authentication properties. - :param type: Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal). - :type type: str - :param pfx: Base64-encoded contents of a PFX file or Certificate when used for - ServicePrincipal. - :type pfx: ~azure.mgmt.datafactory.models.SecretBase - :param username: Web activity authentication user name for basic authentication or ClientID - when used for ServicePrincipal. Type: string (or Expression with resultType string). - :type username: any - :param password: Password for the PFX file or basic authentication / Secret when used for + :ivar type: Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal). + :vartype type: str + :ivar pfx: Base64-encoded contents of a PFX file or Certificate when used for ServicePrincipal. + :vartype pfx: ~azure.mgmt.datafactory.models.SecretBase + :ivar username: Web activity authentication user name for basic authentication or ClientID when + used for ServicePrincipal. Type: string (or Expression with resultType string). + :vartype username: any + :ivar password: Password for the PFX file or basic authentication / Secret when used for ServicePrincipal. 
- :type password: ~azure.mgmt.datafactory.models.SecretBase - :param resource: Resource for which Azure Auth token will be requested when using MSI + :vartype password: ~azure.mgmt.datafactory.models.SecretBase + :ivar resource: Resource for which Azure Auth token will be requested when using MSI Authentication. Type: string (or Expression with resultType string). - :type resource: any - :param user_tenant: TenantId for which Azure Auth token will be requested when using + :vartype resource: any + :ivar user_tenant: TenantId for which Azure Auth token will be requested when using ServicePrincipal Authentication. Type: string (or Expression with resultType string). - :type user_tenant: any - :param credential: The credential reference containing authentication information. - :type credential: ~azure.mgmt.datafactory.models.CredentialReference + :vartype user_tenant: any + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _attribute_map = { @@ -42809,6 +57255,27 @@ def __init__( credential: Optional["CredentialReference"] = None, **kwargs ): + """ + :keyword type: Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal). + :paramtype type: str + :keyword pfx: Base64-encoded contents of a PFX file or Certificate when used for + ServicePrincipal. + :paramtype pfx: ~azure.mgmt.datafactory.models.SecretBase + :keyword username: Web activity authentication user name for basic authentication or ClientID + when used for ServicePrincipal. Type: string (or Expression with resultType string). + :paramtype username: any + :keyword password: Password for the PFX file or basic authentication / Secret when used for + ServicePrincipal. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + :keyword resource: Resource for which Azure Auth token will be requested when using MSI + Authentication. 
Type: string (or Expression with resultType string). + :paramtype resource: any + :keyword user_tenant: TenantId for which Azure Auth token will be requested when using + ServicePrincipal Authentication. Type: string (or Expression with resultType string). + :paramtype user_tenant: any + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ super(WebActivityAuthentication, self).__init__(**kwargs) self.type = type self.pfx = pfx @@ -42827,13 +57294,13 @@ class WebLinkedServiceTypeProperties(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . - Type: string (or Expression with resultType string). - :type url: any - :param authentication_type: Required. Type of authentication used to connect to the web table - source.Constant filled by server. Possible values include: "Basic", "Anonymous", + :ivar url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: + string (or Expression with resultType string). + :vartype url: any + :ivar authentication_type: Required. Type of authentication used to connect to the web table + source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". - :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType """ _validation = { @@ -42856,6 +57323,11 @@ def __init__( url: Any, **kwargs ): + """ + :keyword url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . + Type: string (or Expression with resultType string). 
+ :paramtype url: any + """ super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) self.url = url self.authentication_type = None # type: Optional[str] @@ -42866,13 +57338,13 @@ class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): All required parameters must be populated in order to send to Azure. - :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . - Type: string (or Expression with resultType string). - :type url: any - :param authentication_type: Required. Type of authentication used to connect to the web table - source.Constant filled by server. Possible values include: "Basic", "Anonymous", + :ivar url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: + string (or Expression with resultType string). + :vartype url: any + :ivar authentication_type: Required. Type of authentication used to connect to the web table + source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". - :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType """ _validation = { @@ -42891,6 +57363,11 @@ def __init__( url: Any, **kwargs ): + """ + :keyword url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . + Type: string (or Expression with resultType string). + :paramtype url: any + """ super(WebAnonymousAuthentication, self).__init__(url=url, **kwargs) self.authentication_type = 'Anonymous' # type: str @@ -42900,18 +57377,18 @@ class WebBasicAuthentication(WebLinkedServiceTypeProperties): All required parameters must be populated in order to send to Azure. - :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . - Type: string (or Expression with resultType string). - :type url: any - :param authentication_type: Required. 
Type of authentication used to connect to the web table - source.Constant filled by server. Possible values include: "Basic", "Anonymous", + :ivar url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: + string (or Expression with resultType string). + :vartype url: any + :ivar authentication_type: Required. Type of authentication used to connect to the web table + source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". - :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType - :param username: Required. User name for Basic authentication. Type: string (or Expression with + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType + :ivar username: Required. User name for Basic authentication. Type: string (or Expression with resultType string). - :type username: any - :param password: Required. The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase + :vartype username: any + :ivar password: Required. The password for Basic authentication. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -42936,6 +57413,16 @@ def __init__( password: "SecretBase", **kwargs ): + """ + :keyword url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . + Type: string (or Expression with resultType string). + :paramtype url: any + :keyword username: Required. User name for Basic authentication. Type: string (or Expression + with resultType string). + :paramtype username: any + :keyword password: Required. The password for Basic authentication. 
+ :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + """ super(WebBasicAuthentication, self).__init__(url=url, **kwargs) self.authentication_type = 'Basic' # type: str self.username = username @@ -42947,17 +57434,17 @@ class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): All required parameters must be populated in order to send to Azure. - :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . - Type: string (or Expression with resultType string). - :type url: any - :param authentication_type: Required. Type of authentication used to connect to the web table - source.Constant filled by server. Possible values include: "Basic", "Anonymous", + :ivar url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: + string (or Expression with resultType string). + :vartype url: any + :ivar authentication_type: Required. Type of authentication used to connect to the web table + source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". - :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType - :param pfx: Required. Base64-encoded contents of a PFX file. - :type pfx: ~azure.mgmt.datafactory.models.SecretBase - :param password: Required. Password for the PFX file. - :type password: ~azure.mgmt.datafactory.models.SecretBase + :vartype authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType + :ivar pfx: Required. Base64-encoded contents of a PFX file. + :vartype pfx: ~azure.mgmt.datafactory.models.SecretBase + :ivar password: Required. Password for the PFX file. + :vartype password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -42982,6 +57469,15 @@ def __init__( password: "SecretBase", **kwargs ): + """ + :keyword url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . + Type: string (or Expression with resultType string). 
+ :paramtype url: any + :keyword pfx: Required. Base64-encoded contents of a PFX file. + :paramtype pfx: ~azure.mgmt.datafactory.models.SecretBase + :keyword password: Required. Password for the PFX file. + :paramtype password: ~azure.mgmt.datafactory.models.SecretBase + """ super(WebClientCertificateAuthentication, self).__init__(url=url, **kwargs) self.authentication_type = 'ClientCertificate' # type: str self.pfx = pfx @@ -42993,42 +57489,42 @@ class WebHookActivity(ControlActivity): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param method: Required. Rest API method for target endpoint. Possible values include: "POST". - :type method: str or ~azure.mgmt.datafactory.models.WebHookActivityMethod - :param url: Required. WebHook activity target endpoint and path. Type: string (or Expression + :vartype additional_properties: dict[str, any] + :ivar name: Required. Activity name. + :vartype name: str + :ivar type: Required. Type of activity.Constant filled by server. + :vartype type: str + :ivar description: Activity description. + :vartype description: str + :ivar depends_on: Activity depends on condition. + :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :ivar user_properties: Activity user properties. 
+ :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar method: Required. Rest API method for target endpoint. Possible values include: "POST". + :vartype method: str or ~azure.mgmt.datafactory.models.WebHookActivityMethod + :ivar url: Required. WebHook activity target endpoint and path. Type: string (or Expression with resultType string). - :type url: any - :param timeout: The timeout within which the webhook should be called back. If there is no - value specified, it defaults to 10 minutes. Type: string. Pattern: + :vartype url: any + :ivar timeout: The timeout within which the webhook should be called back. If there is no value + specified, it defaults to 10 minutes. Type: string. Pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: str - :param headers: Represents the headers that will be sent to the request. For example, to set - the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": + :vartype timeout: str + :ivar headers: Represents the headers that will be sent to the request. For example, to set the + language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :type headers: any - :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT + :vartype headers: any + :ivar body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). - :type body: any - :param authentication: Authentication method used for calling the endpoint. - :type authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication - :param report_status_on_call_back: When set to true, statusCode, output and error in callback + :vartype body: any + :ivar authentication: Authentication method used for calling the endpoint. 
+ :vartype authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication + :ivar report_status_on_call_back: When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with resultType boolean). - :type report_status_on_call_back: any + :vartype report_status_on_call_back: any """ _validation = { @@ -43071,6 +57567,43 @@ def __init__( report_status_on_call_back: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Required. Activity name. + :paramtype name: str + :keyword description: Activity description. + :paramtype description: str + :keyword depends_on: Activity depends on condition. + :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :keyword user_properties: Activity user properties. + :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword method: Required. Rest API method for target endpoint. Possible values include: + "POST". + :paramtype method: str or ~azure.mgmt.datafactory.models.WebHookActivityMethod + :keyword url: Required. WebHook activity target endpoint and path. Type: string (or Expression + with resultType string). + :paramtype url: any + :keyword timeout: The timeout within which the webhook should be called back. If there is no + value specified, it defaults to 10 minutes. Type: string. Pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype timeout: str + :keyword headers: Represents the headers that will be sent to the request. For example, to set + the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": + "application/json" }. 
Type: string (or Expression with resultType string). + :paramtype headers: any + :keyword body: Represents the payload that will be sent to the endpoint. Required for POST/PUT + method, not allowed for GET method Type: string (or Expression with resultType string). + :paramtype body: any + :keyword authentication: Authentication method used for calling the endpoint. + :paramtype authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication + :keyword report_status_on_call_back: When set to true, statusCode, output and error in callback + request body will be consumed by activity. The activity can be marked as failed by setting + statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with + resultType boolean). + :paramtype report_status_on_call_back: any + """ super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.type = 'WebHook' # type: str self.method = method @@ -43087,21 +57620,21 @@ class WebLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. 
- :type annotations: list[any] - :param type_properties: Required. Web linked service properties. - :type type_properties: ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar type_properties: Required. Web linked service properties. + :vartype type_properties: ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties """ _validation = { @@ -43130,6 +57663,21 @@ def __init__( annotations: Optional[List[Any]] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword type_properties: Required. Web linked service properties. 
+ :paramtype type_properties: ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties + """ super(WebLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Web' # type: str self.type_properties = type_properties @@ -43140,26 +57688,26 @@ class WebSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -43187,6 +57735,26 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :paramtype additional_columns: any + """ super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'WebSource' # type: str self.additional_columns = additional_columns @@ -43197,34 +57765,34 @@ class WebTableDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param index: Required. The zero-based index of the table in the web page. Type: integer (or + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar index: Required. The zero-based index of the table in the web page. Type: integer (or Expression with resultType integer), minimum: 0. - :type index: any - :param path: The relative URL to the web page from the linked service URL. Type: string (or + :vartype index: any + :ivar path: The relative URL to the web page from the linked service URL. Type: string (or Expression with resultType string). - :type path: any + :vartype path: any """ _validation = { @@ -43262,6 +57830,34 @@ def __init__( path: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. 
+ :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword index: Required. The zero-based index of the table in the web page. Type: integer (or + Expression with resultType integer), minimum: 0. + :paramtype index: any + :keyword path: The relative URL to the web page from the linked service URL. Type: string (or + Expression with resultType string). + :paramtype path: any + """ super(WebTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'WebTable' # type: str self.index = index @@ -43273,21 +57869,21 @@ class WranglingDataFlow(DataFlow): All required parameters must be populated in order to send to Azure. - :param type: Required. Type of data flow.Constant filled by server. - :type type: str - :param description: The description of the data flow. - :type description: str - :param annotations: List of tags that can be used for describing the data flow. - :type annotations: list[any] - :param folder: The folder that this data flow is in. If not specified, Data flow will appear at + :ivar type: Required. 
Type of data flow.Constant filled by server. + :vartype type: str + :ivar description: The description of the data flow. + :vartype description: str + :ivar annotations: List of tags that can be used for describing the data flow. + :vartype annotations: list[any] + :ivar folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder - :param sources: List of sources in Power Query. - :type sources: list[~azure.mgmt.datafactory.models.PowerQuerySource] - :param script: Power query mashup script. - :type script: str - :param document_locale: Locale of the Power query mashup document. - :type document_locale: str + :vartype folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :ivar sources: List of sources in Power Query. + :vartype sources: list[~azure.mgmt.datafactory.models.PowerQuerySource] + :ivar script: Power query mashup script. + :vartype script: str + :ivar document_locale: Locale of the Power query mashup document. + :vartype document_locale: str """ _validation = { @@ -43315,6 +57911,21 @@ def __init__( document_locale: Optional[str] = None, **kwargs ): + """ + :keyword description: The description of the data flow. + :paramtype description: str + :keyword annotations: List of tags that can be used for describing the data flow. + :paramtype annotations: list[any] + :keyword folder: The folder that this data flow is in. If not specified, Data flow will appear + at the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :keyword sources: List of sources in Power Query. + :paramtype sources: list[~azure.mgmt.datafactory.models.PowerQuerySource] + :keyword script: Power query mashup script. + :paramtype script: str + :keyword document_locale: Locale of the Power query mashup document. 
+ :paramtype document_locale: str + """ super(WranglingDataFlow, self).__init__(description=description, annotations=annotations, folder=folder, **kwargs) self.type = 'WranglingDataFlow' # type: str self.sources = sources @@ -43327,44 +57938,44 @@ class XeroLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_properties: Properties used to connect to Xero. It is mutually exclusive with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[any] + :ivar connection_properties: Properties used to connect to Xero. 
It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: any - :param host: The endpoint of the Xero server. (i.e. api.xero.com). - :type host: any - :param consumer_key: The consumer key associated with the Xero application. - :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase - :param private_key: The private key from the .pem file that was generated for your Xero private + :vartype connection_properties: any + :ivar host: The endpoint of the Xero server. (i.e. api.xero.com). + :vartype host: any + :ivar consumer_key: The consumer key associated with the Xero application. + :vartype consumer_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar private_key: The private key from the .pem file that was generated for your Xero private application. You must include all the text from the .pem file, including the Unix line endings( ). - :type private_key: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype private_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. 
- :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -43406,6 +58017,45 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_properties: Properties used to connect to Xero. It is mutually exclusive + with any other properties in the linked service. Type: object. + :paramtype connection_properties: any + :keyword host: The endpoint of the Xero server. (i.e. api.xero.com). + :paramtype host: any + :keyword consumer_key: The consumer key associated with the Xero application. + :paramtype consumer_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword private_key: The private key from the .pem file that was generated for your Xero + private application. You must include all the text from the .pem file, including the Unix line + endings( + ). 
+ :paramtype private_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(XeroLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Xero' # type: str self.connection_properties = connection_properties @@ -43423,30 +58073,30 @@ class XeroObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. 
+ :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). 
+ :vartype table_name: any """ _validation = { @@ -43481,6 +58131,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(XeroObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'XeroObject' # type: str self.table_name = table_name @@ -43491,32 +58165,32 @@ class XeroSource(TabularSource): All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: any + :vartype query: any """ _validation = { @@ -43548,6 +58222,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). + :paramtype query: any + """ super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'XeroSource' # type: str self.query = query @@ -43558,40 +58258,40 @@ class XmlDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :type schema: any - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param location: The location of the json data storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param encoding_name: The code page name of the preferred encoding. If not specified, the + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar location: The location of the json data storage. + :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation + :ivar encoding_name: The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :type encoding_name: any - :param null_value: The null value string. Type: string (or Expression with resultType string). 
- :type null_value: any - :param compression: The data compression method used for the json dataset. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + :vartype encoding_name: any + :ivar null_value: The null value string. Type: string (or Expression with resultType string). + :vartype null_value: any + :ivar compression: The data compression method used for the json dataset. + :vartype compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -43632,6 +58332,41 @@ def __init__( compression: Optional["DatasetCompression"] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword location: The location of the json data storage. 
+ :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation + :keyword encoding_name: The code page name of the preferred encoding. If not specified, the + default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column + of the table in the following link to set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). + :paramtype encoding_name: any + :keyword null_value: The null value string. Type: string (or Expression with resultType + string). + :paramtype null_value: any + :keyword compression: The data compression method used for the json dataset. + :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ super(XmlDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'Xml' # type: str self.location = location @@ -43645,27 +58380,27 @@ class XmlReadSettings(FormatReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param compression_properties: Compression settings. - :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings - :param validation_mode: Indicates what validation method is used when reading the xml files. + :vartype additional_properties: dict[str, any] + :ivar type: Required. The read setting type.Constant filled by server. + :vartype type: str + :ivar compression_properties: Compression settings. 
+ :vartype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + :ivar validation_mode: Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). - :type validation_mode: any - :param detect_data_type: Indicates whether type detection is enabled when reading the xml - files. Type: boolean (or Expression with resultType boolean). - :type detect_data_type: any - :param namespaces: Indicates whether namespace is enabled when reading the xml files. Type: + :vartype validation_mode: any + :ivar detect_data_type: Indicates whether type detection is enabled when reading the xml files. + Type: boolean (or Expression with resultType boolean). + :vartype detect_data_type: any + :ivar namespaces: Indicates whether namespace is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). - :type namespaces: any - :param namespace_prefixes: Namespace uri to prefix mappings to override the prefixes in column + :vartype namespaces: any + :ivar namespace_prefixes: Namespace uri to prefix mappings to override the prefixes in column names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name in the xml data file will be used. Example: "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object). - :type namespace_prefixes: any + :vartype namespace_prefixes: any """ _validation = { @@ -43693,6 +58428,27 @@ def __init__( namespace_prefixes: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword compression_properties: Compression settings. 
+ :paramtype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + :keyword validation_mode: Indicates what validation method is used when reading the xml files. + Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). + :paramtype validation_mode: any + :keyword detect_data_type: Indicates whether type detection is enabled when reading the xml + files. Type: boolean (or Expression with resultType boolean). + :paramtype detect_data_type: any + :keyword namespaces: Indicates whether namespace is enabled when reading the xml files. Type: + boolean (or Expression with resultType boolean). + :paramtype namespaces: any + :keyword namespace_prefixes: Namespace uri to prefix mappings to override the prefixes in + column names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix + of xml element/attribute name in the xml data file will be used. Example: + "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object). + :paramtype namespace_prefixes: any + """ super(XmlReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'XmlReadSettings' # type: str self.compression_properties = compression_properties @@ -43707,30 +58463,30 @@ class XmlSource(CopySource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. 
+ :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param store_settings: Xml store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: Xml format settings. - :type format_settings: ~azure.mgmt.datafactory.models.XmlReadSettings - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype disable_metrics_collection: any + :ivar store_settings: Xml store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar format_settings: Xml format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.XmlReadSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :type additional_columns: any + :vartype additional_columns: any """ _validation = { @@ -43762,6 +58518,30 @@ def __init__( additional_columns: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword store_settings: Xml store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword format_settings: Xml format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.XmlReadSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :paramtype additional_columns: any + """ super(XmlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'XmlSource' # type: str self.store_settings = store_settings @@ -43774,14 +58554,14 @@ class ZipDeflateReadSettings(CompressionReadSettings): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. The Compression setting type.Constant filled by server. - :type type: str - :param preserve_zip_file_name_as_folder: Preserve the zip file name as folder path. Type: + :vartype additional_properties: dict[str, any] + :ivar type: Required. The Compression setting type.Constant filled by server. + :vartype type: str + :ivar preserve_zip_file_name_as_folder: Preserve the zip file name as folder path. Type: boolean (or Expression with resultType boolean). - :type preserve_zip_file_name_as_folder: any + :vartype preserve_zip_file_name_as_folder: any """ _validation = { @@ -43801,6 +58581,14 @@ def __init__( preserve_zip_file_name_as_folder: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword preserve_zip_file_name_as_folder: Preserve the zip file name as folder path. Type: + boolean (or Expression with resultType boolean). 
+ :paramtype preserve_zip_file_name_as_folder: any + """ super(ZipDeflateReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'ZipDeflateReadSettings' # type: str self.preserve_zip_file_name_as_folder = preserve_zip_file_name_as_folder @@ -43811,40 +58599,40 @@ class ZohoLinkedService(LinkedService): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[any] - :param connection_properties: Properties used to connect to Zoho. It is mutually exclusive with + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of linked service.Constant filled by server. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. 
+ :vartype annotations: list[any] + :ivar connection_properties: Properties used to connect to Zoho. It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: any - :param endpoint: The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). - :type endpoint: any - :param access_token: The access token for Zoho authentication. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + :vartype connection_properties: any + :ivar endpoint: The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). + :vartype endpoint: any + :ivar access_token: The access token for Zoho authentication. + :vartype access_token: ~azure.mgmt.datafactory.models.SecretBase + :ivar use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: any - :param use_host_verification: Specifies whether to require the host name in the server's + :vartype use_encrypted_endpoints: any + :ivar use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: any - :param use_peer_verification: Specifies whether to verify the identity of the server when + :vartype use_host_verification: any + :ivar use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: any - :param encrypted_credential: The encrypted credential used for authentication. Credentials are + :vartype use_peer_verification: any + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). - :type encrypted_credential: any + :vartype encrypted_credential: any """ _validation = { @@ -43884,6 +58672,40 @@ def __init__( encrypted_credential: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[any] + :keyword connection_properties: Properties used to connect to Zoho. It is mutually exclusive + with any other properties in the linked service. Type: object. + :paramtype connection_properties: any + :keyword endpoint: The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). + :paramtype endpoint: any + :keyword access_token: The access token for Zoho authentication. + :paramtype access_token: ~azure.mgmt.datafactory.models.SecretBase + :keyword use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted + using HTTPS. The default value is true. + :paramtype use_encrypted_endpoints: any + :keyword use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :paramtype use_host_verification: any + :keyword use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. 
+ :paramtype use_peer_verification: any + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string (or Expression + with resultType string). + :paramtype encrypted_credential: any + """ super(ZohoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Zoho' # type: str self.connection_properties = connection_properties @@ -43900,30 +58722,30 @@ class ZohoObjectDataset(Dataset): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression + :vartype additional_properties: dict[str, any] + :ivar type: Required. Type of dataset.Constant filled by server. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: any - :param schema: Columns that define the physical type schema of the dataset. Type: array (or + :vartype structure: any + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: any - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[any] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :vartype schema: any + :ivar linked_service_name: Required. Linked service reference. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[any] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: any + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table_name: The table name. Type: string (or Expression with resultType string). + :vartype table_name: any """ _validation = { @@ -43958,6 +58780,30 @@ def __init__( table_name: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: any + :keyword schema: Columns that define the physical type schema of the dataset. 
Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: any + :keyword linked_service_name: Required. Linked service reference. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[any] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table_name: The table name. Type: string (or Expression with resultType string). + :paramtype table_name: any + """ super(ZohoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'ZohoObject' # type: str self.table_name = table_name @@ -43968,32 +58814,32 @@ class ZohoSource(TabularSource): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, any] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + :vartype additional_properties: dict[str, any] + :ivar type: Required. Copy source type.Constant filled by server. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). 
- :type source_retry_count: any - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + :vartype source_retry_count: any + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: any - :param max_concurrent_connections: The maximum concurrent connection count for the source data + :vartype source_retry_wait: any + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: any - :param disable_metrics_collection: If true, disable data store metrics collection. Default is + :vartype max_concurrent_connections: any + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). - :type disable_metrics_collection: any - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + :vartype disable_metrics_collection: any + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: any - :param additional_columns: Specifies the additional columns to be added to source data. Type: + :vartype query_timeout: any + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :type additional_columns: any - :param query: A query to retrieve data from source. Type: string (or Expression with resultType + :vartype additional_columns: any + :ivar query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: any + :vartype query: any """ _validation = { @@ -44025,6 +58871,32 @@ def __init__( query: Optional[Any] = None, **kwargs ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: any + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: any + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: any + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: any + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: any + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: any + :keyword query: A query to retrieve data from source. Type: string (or Expression with + resultType string). 
+ :paramtype query: any + """ super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ZohoSource' # type: str self.query = query diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py index 5c6675254c94..230a87fdd3e2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py @@ -5,23 +5,73 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat +from msrest import Serializer from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Optional, TypeVar - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_query_by_pipeline_run_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + run_id, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "runId": _SERIALIZER.url("run_id", run_id, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + 
return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class ActivityRunsOperations(object): """ActivityRunsOperations operations. @@ -44,6 +94,7 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def query_by_pipeline_run( self, resource_group_name, # type: str @@ -73,33 +124,23 @@ def query_by_pipeline_run( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.query_by_pipeline_run.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(filter_parameters, 'RunFilterParameters') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = 
build_query_by_pipeline_run_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + run_id=run_id, + content_type=content_type, + json=_json, + template_url=self.query_by_pipeline_run.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -113,4 +154,6 @@ def query_by_pipeline_run( return cls(pipeline_response, deserialized, {}) return deserialized + query_by_pipeline_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py index 84375fcbae30..b0a6ccfd00bb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py @@ -5,26 +5,230 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling +from msrest import Serializer from .. import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_create_request_initial( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_query_by_factory_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryDataFlowDebugSessions') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_add_data_flow_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/addDataFlowToDebugSession') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_delete_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_execute_command_request_initial( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class DataFlowDebugSessionOperations(object): """DataFlowDebugSessionOperations operations. 
@@ -60,32 +264,22 @@ def _create_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + _json = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') + + request = build_create_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self._create_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') - body_content_kwargs['content'] = body_content - 
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -93,20 +287,24 @@ def _create_initial( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - response_headers = {} deserialized = None + response_headers = {} if response.status_code == 200: deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response) if response.status_code == 202: response_headers['location']=self._deserialize('str', response.headers.get('location')) + if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore + + @distributed_trace def begin_create( self, resource_group_name, # type: str @@ -125,15 +323,20 @@ def begin_create( :type request: ~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of LROPoller that returns either CreateDataFlowDebugSessionResponse or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either CreateDataFlowDebugSessionResponse or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.CreateDataFlowDebugSessionResponse"] lro_delay = kwargs.pop( 'polling_interval', @@ -145,27 +348,21 @@ def begin_create( resource_group_name=resource_group_name, factory_name=factory_name, request=request, + content_type=content_type, cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, 
pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -177,8 +374,10 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore + @distributed_trace def query_by_factory( self, resource_group_name, # type: str @@ -193,8 +392,10 @@ def query_by_factory( :param factory_name: The factory name. :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either QueryDataFlowDebugSessionsResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.QueryDataFlowDebugSessionsResponse] + :return: An iterator like instance of either QueryDataFlowDebugSessionsResponse or the result + of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.QueryDataFlowDebugSessionsResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.QueryDataFlowDebugSessionsResponse"] @@ -202,36 +403,33 @@ def query_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not 
next_link: - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.post(url, query_parameters, header_parameters) + + request = build_query_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.query_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_query_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('QueryDataFlowDebugSessionsResponse', pipeline_response) + deserialized = self._deserialize("QueryDataFlowDebugSessionsResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -249,11 +447,13 @@ def get_next(next_link=None): return pipeline_response + 
return ItemPaged( get_next, extract_data ) query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryDataFlowDebugSessions'} # type: ignore + @distributed_trace def add_data_flow( self, resource_group_name, # type: str @@ -280,32 +480,22 @@ def add_data_flow( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.add_data_flow.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(request, 'DataFlowDebugPackage') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_add_data_flow_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + 
template_url=self.add_data_flow.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(request, 'DataFlowDebugPackage') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -319,8 +509,11 @@ def add_data_flow( return cls(pipeline_response, deserialized, {}) return deserialized + add_data_flow.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/addDataFlowToDebugSession'} # type: ignore + + @distributed_trace def delete( self, resource_group_name, # type: str @@ -347,32 +540,22 @@ def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', 
"application/json") # type: Optional[str] + + _json = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -385,6 +568,7 @@ def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession'} # type: ignore + def _execute_command_initial( self, resource_group_name, # type: str @@ -398,32 +582,22 @@ def _execute_command_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._execute_command_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': 
self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + _json = self._serialize.body(request, 'DataFlowDebugCommandRequest') + + request = build_execute_command_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self._execute_command_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -431,20 +605,24 @@ def _execute_command_initial( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - response_headers = {} deserialized = None + response_headers = {} if response.status_code == 200: 
deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response) if response.status_code == 202: response_headers['location']=self._deserialize('str', response.headers.get('location')) + if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized + _execute_command_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} # type: ignore + + @distributed_trace def begin_execute_command( self, resource_group_name, # type: str @@ -463,15 +641,20 @@ def begin_execute_command( :type request: ~azure.mgmt.datafactory.models.DataFlowDebugCommandRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either DataFlowDebugCommandResponse or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either DataFlowDebugCommandResponse or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.DataFlowDebugCommandResponse"] lro_delay = kwargs.pop( 'polling_interval', @@ -483,27 +666,21 @@ def begin_execute_command( resource_group_name=resource_group_name, factory_name=factory_name, request=request, + content_type=content_type, cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -515,4 +692,5 @@ def get_long_running_output(pipeline_response): ) else: return 
LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_execute_command.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py index d3dbbe8683aa..44a6ef89c132 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py @@ -5,24 +5,193 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat +from msrest import Serializer from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_create_or_update_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + data_flow_name, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "dataFlowName": _SERIALIZER.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + 
header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + data_flow_name, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "dataFlowName": _SERIALIZER.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + 
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_delete_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + data_flow_name, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "dataFlowName": _SERIALIZER.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_list_by_factory_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class DataFlowsOperations(object): """DataFlowsOperations operations. 
@@ -45,6 +214,7 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def create_or_update( self, resource_group_name, # type: str @@ -78,35 +248,24 @@ def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(data_flow, 'DataFlowResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, 
query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(data_flow, 'DataFlowResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + data_flow_name=data_flow_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -120,8 +279,11 @@ def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore + + @distributed_trace def get( self, resource_group_name, # type: str @@ -152,30 +314,19 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, 
pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + data_flow_name=data_flow_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -189,8 +340,11 @@ def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore + + @distributed_trace def delete( self, resource_group_name, # type: str @@ -217,28 +371,18 @@ def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', 
max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + data_flow_name=data_flow_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -251,6 +395,8 @@ def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore + + @distributed_trace def list_by_factory( self, resource_group_name, # type: str @@ -265,7 +411,8 @@ def list_by_factory( :param factory_name: The factory name. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DataFlowListResponse or the result of cls(response) + :return: An iterator like instance of either DataFlowListResponse or the result of + cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.DataFlowListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ @@ -274,36 +421,33 @@ def list_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('DataFlowListResponse', pipeline_response) + deserialized = self._deserialize("DataFlowListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -321,6 +465,7 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged( get_next, extract_data ) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py index 66c20b4d316f..5abbe1ab85cc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py @@ -5,24 +5,193 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat +from msrest import Serializer from .. import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_list_by_factory_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + dataset_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "datasetName": _SERIALIZER.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + dataset_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "datasetName": _SERIALIZER.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_delete_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + dataset_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "datasetName": _SERIALIZER.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class DatasetsOperations(object): """DatasetsOperations operations. 
@@ -45,6 +214,7 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def list_by_factory( self, resource_group_name, # type: str @@ -68,36 +238,33 @@ def list_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + 
subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('DatasetListResponse', pipeline_response) + deserialized = self._deserialize("DatasetListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -115,11 +282,13 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged( get_next, extract_data ) list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets'} # type: ignore + @distributed_trace def create_or_update( self, resource_group_name, # type: str @@ -153,35 +322,24 @@ def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - 
query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(dataset, 'DatasetResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(dataset, 'DatasetResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + dataset_name=dataset_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -195,8 +353,11 @@ def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore + + @distributed_trace def get( self, resource_group_name, # type: str @@ -227,30 +388,19 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - 
api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + dataset_name=dataset_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -266,8 +416,11 @@ def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore + + @distributed_trace def delete( self, resource_group_name, # type: str @@ -294,28 +447,18 @@ def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + dataset_name=dataset_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = 
pipeline_response.http_response @@ -327,3 +470,4 @@ def delete( return cls(pipeline_response, None, {}) delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py index 5790072a6751..1e402eee45c1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py @@ -5,23 +5,149 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat +from msrest import Serializer from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Optional, TypeVar - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_get_feature_value_request( + subscription_id, # type: str + location_id, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "locationId": _SERIALIZER.url("location_id", location_id, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_feature_value_by_factory_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_query_feature_values_by_factory_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class ExposureControlOperations(object): """ExposureControlOperations operations. 
@@ -44,6 +170,7 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def get_feature_value( self, location_id, # type: str @@ -67,31 +194,21 @@ def get_feature_value( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.get_feature_value.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'locationId': self._serialize.url("location_id", location_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + _json = self._serialize.body(exposure_control_request, 'ExposureControlRequest') + + request = build_get_feature_value_request( + subscription_id=self._config.subscription_id, + location_id=location_id, + content_type=content_type, + json=_json, + template_url=self.get_feature_value.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, 
query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -105,8 +222,11 @@ def get_feature_value( return cls(pipeline_response, deserialized, {}) return deserialized + get_feature_value.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue'} # type: ignore + + @distributed_trace def get_feature_value_by_factory( self, resource_group_name, # type: str @@ -133,32 +253,22 @@ def get_feature_value_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.get_feature_value_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(exposure_control_request, 'ExposureControlRequest') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 
self._serialize.header("accept", accept, 'str') + request = build_get_feature_value_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.get_feature_value_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -172,8 +282,11 @@ def get_feature_value_by_factory( return cls(pipeline_response, deserialized, {}) return deserialized + get_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue'} # type: ignore + + @distributed_trace def query_feature_values_by_factory( self, resource_group_name, # type: str @@ -189,7 +302,8 @@ def query_feature_values_by_factory( :param factory_name: The factory name. :type factory_name: str :param exposure_control_batch_request: The exposure control request for list of features. 
- :type exposure_control_batch_request: ~azure.mgmt.datafactory.models.ExposureControlBatchRequest + :type exposure_control_batch_request: + ~azure.mgmt.datafactory.models.ExposureControlBatchRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ExposureControlBatchResponse, or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.ExposureControlBatchResponse @@ -200,32 +314,22 @@ def query_feature_values_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.query_feature_values_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + _json = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest') + + request = build_query_feature_values_by_factory_request( + 
subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.query_feature_values_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -239,4 +343,6 @@ def query_feature_values_by_factory( return cls(pipeline_response, deserialized, {}) return deserialized + query_feature_values_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py index bebc303b4b8b..b4196c3456ab 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py @@ -5,24 +5,375 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat +from msrest import Serializer from .. import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_list_request( + subscription_id, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_configure_factory_repo_request( + subscription_id, # type: str + location_id, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "locationId": _SERIALIZER.url("location_id", location_id, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = 
_SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_list_by_resource_group_request( + subscription_id, # type: str + resource_group_name, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_update_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PATCH", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_delete_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_git_hub_access_token_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_data_plane_access_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class FactoriesOperations(object): """FactoriesOperations operations. 
@@ -45,6 +396,7 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def list( self, **kwargs # type: Any @@ -62,34 +414,29 @@ def list( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + template_url=self.list.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('FactoryListResponse', pipeline_response) + deserialized = self._deserialize("FactoryListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = 
cls(list_of_elem) @@ -107,11 +454,13 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged( get_next, extract_data ) list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories'} # type: ignore + @distributed_trace def configure_factory_repo( self, location_id, # type: str @@ -135,31 +484,21 @@ def configure_factory_repo( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.configure_factory_repo.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'locationId': self._serialize.url("location_id", location_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + _json = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') + + request = build_configure_factory_repo_request( + subscription_id=self._config.subscription_id, + location_id=location_id, + content_type=content_type, + json=_json, + template_url=self.configure_factory_repo.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = 
self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -173,8 +512,11 @@ def configure_factory_repo( return cls(pipeline_response, deserialized, {}) return deserialized + configure_factory_repo.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo'} # type: ignore + + @distributed_trace def list_by_resource_group( self, resource_group_name, # type: str @@ -195,35 +537,31 @@ def list_by_resource_group( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_resource_group.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + 
template_url=self.list_by_resource_group.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('FactoryListResponse', pipeline_response) + deserialized = self._deserialize("FactoryListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -241,11 +579,13 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged( get_next, extract_data ) list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories'} # type: ignore + @distributed_trace def create_or_update( self, resource_group_name, # type: str @@ -276,34 +616,23 @@ def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, 
min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory, 'Factory') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(factory, 'Factory') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -317,8 +646,11 @@ def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore + + @distributed_trace def update( self, resource_group_name, # type: str @@ -345,32 +677,22 @@ def update( 401: ClientAuthenticationError, 404: 
ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + _json = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') + + request = build_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, 
**body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -384,8 +706,11 @@ def update( return cls(pipeline_response, deserialized, {}) return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore + + @distributed_trace def get( self, resource_group_name, # type: str @@ -413,29 +738,18 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) 
+ request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -451,8 +765,11 @@ def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore + + @distributed_trace def delete( self, resource_group_name, # type: str @@ -476,27 +793,17 @@ def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) 
- request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -509,6 +816,8 @@ def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore + + @distributed_trace def get_git_hub_access_token( self, resource_group_name, # type: str @@ -535,32 +844,22 @@ def get_git_hub_access_token( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.get_git_hub_access_token.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = 
build_get_git_hub_access_token_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.get_git_hub_access_token.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -574,8 +873,11 @@ def get_git_hub_access_token( return cls(pipeline_response, deserialized, {}) return deserialized + get_git_hub_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken'} # type: ignore + + @distributed_trace def get_data_plane_access( self, resource_group_name, # type: str @@ -602,32 +904,22 @@ def get_data_plane_access( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.get_data_plane_access.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, 
pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(policy, 'UserAccessPolicy') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_get_data_plane_access_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.get_data_plane_access.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(policy, 'UserAccessPolicy') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -641,4 +933,6 @@ def get_data_plane_access( return cls(pipeline_response, deserialized, {}) return deserialized + get_data_plane_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py index 
ff694dd9790e..4a3397ffe97d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py @@ -5,23 +5,195 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat +from msrest import Serializer from .. import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Optional, TypeVar - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_get_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + node_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "nodeName": _SERIALIZER.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_delete_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + node_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "nodeName": _SERIALIZER.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_update_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + node_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "nodeName": _SERIALIZER.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PATCH", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_ip_address_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + node_name, # type: str + **kwargs # type: Any 
+): + # type: (...) -> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "nodeName": _SERIALIZER.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class IntegrationRuntimeNodesOperations(object): """IntegrationRuntimeNodesOperations operations. 
@@ -44,6 +216,7 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def get( self, resource_group_name, # type: str @@ -73,29 +246,19 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + node_name=node_name, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 
'str') - - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -109,8 +272,11 @@ def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore + + @distributed_trace def delete( self, resource_group_name, # type: str @@ -140,29 +306,19 @@ def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + 
resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + node_name=node_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -175,6 +331,8 @@ def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore + + @distributed_trace def update( self, resource_group_name, # type: str @@ -197,7 +355,8 @@ def update( :type node_name: str :param update_integration_runtime_node_request: The parameters for updating an integration runtime node. 
- :type update_integration_runtime_node_request: ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeNodeRequest + :type update_integration_runtime_node_request: + ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeNodeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode @@ -208,34 +367,24 @@ def update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(update_integration_runtime_node_request, 
'UpdateIntegrationRuntimeNodeRequest') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + node_name=node_name, + content_type=content_type, + json=_json, + template_url=self.update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -249,8 +398,11 @@ def update( return cls(pipeline_response, deserialized, {}) return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore + + @distributed_trace def get_ip_address( self, resource_group_name, # type: str @@ -280,29 +432,19 @@ def get_ip_address( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get_ip_address.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_ip_address_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + node_name=node_name, + template_url=self.get_ip_address.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -316,4 +458,6 @@ def get_ip_address( return cls(pipeline_response, deserialized, {}) return deserialized + get_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress'} # type: ignore + diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py index 0a38967fb5e5..537920ba80f4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py @@ -5,25 +5,113 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling +from msrest import Serializer from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_refresh_request_initial( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + 
method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getObjectMetadata') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class 
IntegrationRuntimeObjectMetadataOperations(object): """IntegrationRuntimeObjectMetadataOperations operations. @@ -59,28 +147,18 @@ def _refresh_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self._refresh_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_refresh_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self._refresh_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ 
-96,8 +174,11 @@ def _refresh_initial( return cls(pipeline_response, deserialized, {}) return deserialized + _refresh_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore + + @distributed_trace def begin_refresh( self, resource_group_name, # type: str @@ -116,15 +197,19 @@ def begin_refresh( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either SsisObjectMetadataStatusResponse or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.SsisObjectMetadataStatusResponse] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either SsisObjectMetadataStatusResponse or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.SsisObjectMetadataStatusResponse] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.SsisObjectMetadataStatusResponse"] lro_delay = kwargs.pop( 'polling_interval', @@ -139,25 +224,17 @@ def begin_refresh( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('SsisObjectMetadataStatusResponse', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -169,8 +246,10 @@ def get_long_running_output(pipeline_response): ) else: return 
LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_refresh.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore + @distributed_trace def get( self, resource_group_name, # type: str @@ -201,36 +280,26 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - body_content_kwargs = {} # type: Dict[str, Any] if 
get_metadata_request is not None: - body_content = self._serialize.body(get_metadata_request, 'GetSsisObjectMetadataRequest') + _json = self._serialize.body(get_metadata_request, 'GetSsisObjectMetadataRequest') else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + _json = None + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + content_type=content_type, + json=_json, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -244,4 +313,6 @@ def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getObjectMetadata'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py index 2a61d232c982..35d6e667f6cf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py @@ -5,26 +5,705 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling +from msrest import Serializer from .. import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_list_by_factory_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_update_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PATCH", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_delete_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_status_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_list_outbound_network_dependencies_endpoints_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/outboundNetworkDependenciesEndpoints') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_connection_info_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_regenerate_auth_key_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_list_auth_keys_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_start_request_initial( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_stop_request_initial( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_sync_credentials_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_monitoring_data_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_upgrade_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_remove_links_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_create_linked_integration_runtime_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "integrationRuntimeName": _SERIALIZER.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class IntegrationRuntimesOperations(object): """IntegrationRuntimesOperations operations. 
@@ -47,6 +726,7 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def list_by_factory( self, resource_group_name, # type: str @@ -61,8 +741,10 @@ def list_by_factory( :param factory_name: The factory name. :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either IntegrationRuntimeListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.IntegrationRuntimeListResponse] + :return: An iterator like instance of either IntegrationRuntimeListResponse or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.IntegrationRuntimeListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.IntegrationRuntimeListResponse"] @@ -70,36 +752,33 @@ def list_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, 
**path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -117,11 +796,13 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged( get_next, extract_data ) list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} # type: ignore + @distributed_trace def create_or_update( self, resource_group_name, # type: str @@ -155,35 +836,24 @@ def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", 
"application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + 
integration_runtime_name=integration_runtime_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -197,8 +867,11 @@ def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + @distributed_trace def get( self, resource_group_name, # type: str @@ -230,30 +903,19 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: 
Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -269,8 +931,11 @@ def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + @distributed_trace def update( self, resource_group_name, # type: str @@ -289,7 +954,8 @@ def update( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :param update_integration_runtime_request: The parameters for updating an integration runtime. 
- :type update_integration_runtime_request: ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeRequest + :type update_integration_runtime_request: + ~azure.mgmt.datafactory.models.UpdateIntegrationRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeResource, or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource @@ -300,33 +966,23 @@ def update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", 
content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + content_type=content_type, + json=_json, + template_url=self.update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -340,8 +996,11 @@ def update( return cls(pipeline_response, deserialized, {}) return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + @distributed_trace def delete( self, resource_group_name, # type: str @@ -368,28 +1027,18 @@ def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, 
pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -402,6 +1051,8 @@ def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + @distributed_trace def get_status( self, resource_group_name, # type: str @@ -428,28 +1079,18 @@ def get_status( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get_status.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", 
resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_status_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self.get_status.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -463,8 +1104,11 @@ def get_status( return cls(pipeline_response, deserialized, {}) return deserialized + get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore + + @distributed_trace def list_outbound_network_dependencies_endpoints( self, resource_group_name, # type: str @@ -482,8 +1126,10 @@ def list_outbound_network_dependencies_endpoints( :param integration_runtime_name: The integration runtime name. 
:type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, or the result of cls(response) - :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse + :return: IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, or the result of + cls(response) + :rtype: + ~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse"] @@ -491,28 +1137,18 @@ def list_outbound_network_dependencies_endpoints( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.list_outbound_network_dependencies_endpoints.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} 
# type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_list_outbound_network_dependencies_endpoints_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self.list_outbound_network_dependencies_endpoints.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -526,8 +1162,11 @@ def list_outbound_network_dependencies_endpoints( return cls(pipeline_response, deserialized, {}) return deserialized + list_outbound_network_dependencies_endpoints.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/outboundNetworkDependenciesEndpoints'} # type: ignore + + @distributed_trace def get_connection_info( self, resource_group_name, # type: str @@ -555,28 +1194,18 @@ def get_connection_info( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get_connection_info.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), 
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_connection_info_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self.get_connection_info.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -590,8 +1219,11 @@ def get_connection_info( return cls(pipeline_response, deserialized, {}) return deserialized + get_connection_info.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo'} # type: ignore + + @distributed_trace def regenerate_auth_key( self, resource_group_name, # type: str @@ -611,7 +1243,8 @@ def regenerate_auth_key( :type integration_runtime_name: str :param regenerate_key_parameters: The parameters for regenerating integration runtime authentication key. 
- :type regenerate_key_parameters: ~azure.mgmt.datafactory.models.IntegrationRuntimeRegenerateKeyParameters + :type regenerate_key_parameters: + ~azure.mgmt.datafactory.models.IntegrationRuntimeRegenerateKeyParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeAuthKeys, or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys @@ -622,33 +1255,23 @@ def regenerate_auth_key( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.regenerate_auth_key.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = 
self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_regenerate_auth_key_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + content_type=content_type, + json=_json, + template_url=self.regenerate_auth_key.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -662,8 +1285,11 @@ def regenerate_auth_key( return cls(pipeline_response, deserialized, {}) return deserialized + regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} # type: ignore + + @distributed_trace def list_auth_keys( self, resource_group_name, # type: str @@ -690,28 +1316,18 @@ def list_auth_keys( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.list_auth_keys.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, 
pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_list_auth_keys_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self.list_auth_keys.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -725,8 +1341,10 @@ def list_auth_keys( return cls(pipeline_response, deserialized, {}) return deserialized + list_auth_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore + def _start_initial( self, resource_group_name, # type: str @@ -740,28 +1358,18 @@ def _start_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = 
self._start_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_start_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self._start_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -777,8 +1385,11 @@ def _start_initial( return cls(pipeline_response, deserialized, {}) return deserialized + _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore + + @distributed_trace def begin_start( self, 
resource_group_name, # type: str @@ -797,15 +1408,19 @@ def begin_start( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either IntegrationRuntimeStatusResponse or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.IntegrationRuntimeStatusResponse"] lro_delay = kwargs.pop( 'polling_interval', @@ -820,25 +1435,17 @@ def begin_start( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -850,6 +1457,7 @@ def get_long_running_output(pipeline_response): ) else: return 
LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore def _stop_initial( @@ -865,28 +1473,18 @@ def _stop_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self._stop_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_stop_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self._stop_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) 
- request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -899,6 +1497,8 @@ def _stop_initial( _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore + + @distributed_trace def begin_stop( self, resource_group_name, # type: str @@ -917,15 +1517,17 @@ def begin_stop( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', @@ -940,22 +1542,14 @@ def begin_stop( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -967,8 +1561,10 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_stop.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore + @distributed_trace def sync_credentials( self, resource_group_name, # type: str @@ -998,28 +1594,18 @@ def sync_credentials( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.sync_credentials.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_sync_credentials_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self.sync_credentials.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, 
query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -1032,6 +1618,8 @@ def sync_credentials( sync_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials'} # type: ignore + + @distributed_trace def get_monitoring_data( self, resource_group_name, # type: str @@ -1059,28 +1647,18 @@ def get_monitoring_data( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get_monitoring_data.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_monitoring_data_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, 
+ factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self.get_monitoring_data.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -1094,8 +1672,11 @@ def get_monitoring_data( return cls(pipeline_response, deserialized, {}) return deserialized + get_monitoring_data.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData'} # type: ignore + + @distributed_trace def upgrade( self, resource_group_name, # type: str @@ -1122,28 +1703,18 @@ def upgrade( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.upgrade.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", 
api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_upgrade_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + template_url=self.upgrade.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -1156,6 +1727,8 @@ def upgrade( upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'} # type: ignore + + @distributed_trace def remove_links( self, resource_group_name, # type: str @@ -1176,7 +1749,8 @@ def remove_links( :type integration_runtime_name: str :param linked_integration_runtime_request: The data factory name for the linked integration runtime. 
- :type linked_integration_runtime_request: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeRequest + :type linked_integration_runtime_request: + ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None @@ -1187,33 +1761,23 @@ def remove_links( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.remove_links.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + _json = self._serialize.body(linked_integration_runtime_request, 
'LinkedIntegrationRuntimeRequest') + + request = build_remove_links_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + content_type=content_type, + json=_json, + template_url=self.remove_links.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -1226,6 +1790,8 @@ def remove_links( remove_links.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore + + @distributed_trace def create_linked_integration_runtime( self, resource_group_name, # type: str @@ -1244,7 +1810,8 @@ def create_linked_integration_runtime( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :param create_linked_integration_runtime_request: The linked integration runtime properties. 
- :type create_linked_integration_runtime_request: ~azure.mgmt.datafactory.models.CreateLinkedIntegrationRuntimeRequest + :type create_linked_integration_runtime_request: + ~azure.mgmt.datafactory.models.CreateLinkedIntegrationRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeStatusResponse, or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse @@ -1255,33 +1822,23 @@ def create_linked_integration_runtime( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_linked_integration_runtime.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') - # Construct headers - header_parameters = {} # 
type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_create_linked_integration_runtime_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + content_type=content_type, + json=_json, + template_url=self.create_linked_integration_runtime.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -1295,4 +1852,6 @@ def create_linked_integration_runtime( return cls(pipeline_response, deserialized, {}) return deserialized + create_linked_integration_runtime.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py index 4fef887c4f01..3c85457b41ef 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py @@ -5,24 +5,193 @@ # Code generated by 
Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat +from msrest import Serializer from .. import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_list_by_factory_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + linked_service_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "linkedServiceName": _SERIALIZER.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + linked_service_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "linkedServiceName": _SERIALIZER.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_delete_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + linked_service_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "linkedServiceName": _SERIALIZER.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class LinkedServicesOperations(object): """LinkedServicesOperations operations. @@ -45,6 +214,7 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def list_by_factory( self, resource_group_name, # type: str @@ -59,7 +229,8 @@ def list_by_factory( :param factory_name: The factory name. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either LinkedServiceListResponse or the result of cls(response) + :return: An iterator like instance of either LinkedServiceListResponse or the result of + cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.LinkedServiceListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ @@ -68,36 +239,33 @@ def list_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + 
request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('LinkedServiceListResponse', pipeline_response) + deserialized = self._deserialize("LinkedServiceListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -115,11 +283,13 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged( get_next, extract_data ) list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices'} # type: ignore + @distributed_trace def create_or_update( self, resource_group_name, # type: str @@ -153,35 +323,24 @@ def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, 
pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(linked_service, 'LinkedServiceResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(linked_service, 'LinkedServiceResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + linked_service_name=linked_service_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -195,8 +354,11 @@ def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore + + @distributed_trace def get( self, resource_group_name, # type: str @@ -228,30 +390,19 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + linked_service_name=linked_service_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -267,8 +418,11 @@ def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore + + @distributed_trace def delete( self, resource_group_name, # type: str @@ -295,28 +449,18 @@ def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + 
resource_group_name=resource_group_name, + factory_name=factory_name, + linked_service_name=linked_service_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -328,3 +472,4 @@ def delete( return cls(pipeline_response, None, {}) delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_private_endpoints_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_private_endpoints_operations.py index a4538611fb87..9345c2a00ba5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_private_endpoints_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_private_endpoints_operations.py @@ -5,24 +5,201 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat +from msrest import Serializer from .. import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_list_by_factory_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + managed_virtual_network_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "managedVirtualNetworkName": _SERIALIZER.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + managed_virtual_network_name, # type: str + managed_private_endpoint_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "managedVirtualNetworkName": _SERIALIZER.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + "managedPrivateEndpointName": _SERIALIZER.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') 
+ + return HttpRequest( + method="PUT", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + managed_virtual_network_name, # type: str + managed_private_endpoint_name, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "managedVirtualNetworkName": _SERIALIZER.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + "managedPrivateEndpointName": _SERIALIZER.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, 
Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_delete_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + managed_virtual_network_name, # type: str + managed_private_endpoint_name, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "managedVirtualNetworkName": _SERIALIZER.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + "managedPrivateEndpointName": _SERIALIZER.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = 
_SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class ManagedPrivateEndpointsOperations(object): """ManagedPrivateEndpointsOperations operations. @@ -45,6 +222,7 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def list_by_factory( self, resource_group_name, # type: str @@ -62,8 +240,10 @@ def list_by_factory( :param managed_virtual_network_name: Managed virtual network name. :type managed_virtual_network_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.ManagedPrivateEndpointListResponse] + :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result + of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.ManagedPrivateEndpointListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagedPrivateEndpointListResponse"] @@ -71,37 +251,35 @@ def list_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = 
self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = 
self._deserialize('ManagedPrivateEndpointListResponse', pipeline_response) + deserialized = self._deserialize("ManagedPrivateEndpointListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -119,11 +297,13 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged( get_next, extract_data ) list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints'} # type: ignore + @distributed_trace def create_or_update( self, resource_group_name, # type: str @@ -160,36 +340,25 @@ def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = 
self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + managed_private_endpoint_name=managed_private_endpoint_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -203,8 +372,11 @@ def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore + + @distributed_trace def get( self, resource_group_name, # type: str @@ -239,31 +411,20 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 
self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + managed_private_endpoint_name=managed_private_endpoint_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -277,8 +438,11 @@ def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore + + @distributed_trace def delete( self, resource_group_name, # type: str @@ -308,29 +472,19 @@ def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, 
min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + managed_private_endpoint_name=managed_private_endpoint_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -342,3 +496,4 @@ def delete( return cls(pipeline_response, None, {}) delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_virtual_networks_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_virtual_networks_operations.py index b545ec87a2a8..f65dc786591e 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_virtual_networks_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_virtual_networks_operations.py @@ -5,24 +5,155 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat +from msrest import Serializer from .. import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_list_by_factory_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + managed_virtual_network_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "managedVirtualNetworkName": _SERIALIZER.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + managed_virtual_network_name, # 
type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "managedVirtualNetworkName": _SERIALIZER.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class ManagedVirtualNetworksOperations(object): """ManagedVirtualNetworksOperations operations. 
@@ -45,6 +176,7 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def list_by_factory( self, resource_group_name, # type: str @@ -59,8 +191,10 @@ def list_by_factory( :param factory_name: The factory name. :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.ManagedVirtualNetworkListResponse] + :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.ManagedVirtualNetworkListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagedVirtualNetworkListResponse"] @@ -68,36 +202,33 @@ def list_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, 
**path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('ManagedVirtualNetworkListResponse', pipeline_response) + deserialized = self._deserialize("ManagedVirtualNetworkListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -115,11 +246,13 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged( get_next, extract_data ) list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks'} # type: ignore + @distributed_trace def create_or_update( self, resource_group_name, # type: str @@ -153,35 +286,24 @@ def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", 
"application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, 
+ factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -195,8 +317,11 @@ def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore + + @distributed_trace def get( self, resource_group_name, # type: str @@ -228,30 +353,19 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = 
self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + managed_virtual_network_name=managed_virtual_network_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -265,4 +379,6 @@ def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py index d238d1b93271..8eb3c577141f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py @@ -5,24 +5,58 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat +from msrest import Serializer from .. import models as _models +from .._vendor import _convert_request if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_list_request( + **kwargs # type: Any +): + # type: (...) -> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/providers/Microsoft.DataFactory/operations') + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class Operations(object): """Operations operations. 
@@ -45,6 +79,7 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def list( self, **kwargs # type: Any @@ -53,7 +88,8 @@ def list( """Lists the available Azure Data Factory API operations. :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OperationListResponse or the result of cls(response) + :return: An iterator like instance of either OperationListResponse or the result of + cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.OperationListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ @@ -62,30 +98,27 @@ def list( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = build_list_request( + template_url=self.list.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - 
deserialized = self._deserialize('OperationListResponse', pipeline_response) + deserialized = self._deserialize("OperationListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -103,6 +136,7 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged( get_next, extract_data ) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py index 8cff4bfca47c..19936a1b5a57 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py @@ -5,23 +5,151 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat +from msrest import Serializer from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Optional, TypeVar - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_query_by_factory_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, 
+ **kwargs + ) + + +def build_get_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + run_id, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "runId": _SERIALIZER.url("run_id", run_id, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_cancel_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + run_id, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + is_recursive = kwargs.pop('is_recursive', None) # type: Optional[bool] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "runId": _SERIALIZER.url("run_id", run_id, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + if is_recursive is not None: + query_parameters['isRecursive'] = _SERIALIZER.query("is_recursive", is_recursive, 'bool') + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class PipelineRunsOperations(object): """PipelineRunsOperations operations. 
@@ -44,6 +172,7 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def query_by_factory( self, resource_group_name, # type: str @@ -70,32 +199,22 @@ def query_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(filter_parameters, 'RunFilterParameters') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_query_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.query_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -109,8 +228,11 @@ def query_by_factory( return cls(pipeline_response, deserialized, {}) return deserialized + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns'} # type: ignore + + @distributed_trace def get( self, resource_group_name, # type: str @@ -137,28 +259,18 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + run_id=run_id, 
+ template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -172,8 +284,11 @@ def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}'} # type: ignore + + @distributed_trace def cancel( self, resource_group_name, # type: str @@ -204,30 +319,19 @@ def cancel( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.cancel.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if is_recursive is not None: - query_parameters['isRecursive'] = self._serialize.query("is_recursive", is_recursive, 'bool') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # 
Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_cancel_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + run_id=run_id, + is_recursive=is_recursive, + template_url=self.cancel.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -239,3 +343,4 @@ def cancel( return cls(pipeline_response, None, {}) cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py index 7f972501d499..155033202e85 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py @@ -5,24 +5,247 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat +from msrest import Serializer from .. import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_list_by_factory_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + pipeline_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "pipelineName": _SERIALIZER.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + pipeline_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "pipelineName": _SERIALIZER.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_delete_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + pipeline_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "pipelineName": _SERIALIZER.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_create_run_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + pipeline_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + reference_pipeline_run_id = kwargs.pop('reference_pipeline_run_id', None) # type: Optional[str] + is_recovery = kwargs.pop('is_recovery', None) # type: Optional[bool] + start_activity_name = kwargs.pop('start_activity_name', None) # type: Optional[str] + start_from_failure = kwargs.pop('start_from_failure', None) # type: Optional[bool] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "pipelineName": _SERIALIZER.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if reference_pipeline_run_id is not None: + query_parameters['referencePipelineRunId'] = _SERIALIZER.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str') + if is_recovery is not None: + query_parameters['isRecovery'] = _SERIALIZER.query("is_recovery", is_recovery, 'bool') + if start_activity_name is not None: + query_parameters['startActivityName'] = _SERIALIZER.query("start_activity_name", start_activity_name, 'str') + if start_from_failure is not None: + 
query_parameters['startFromFailure'] = _SERIALIZER.query("start_from_failure", start_from_failure, 'bool') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class PipelinesOperations(object): """PipelinesOperations operations. @@ -45,6 +268,7 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def list_by_factory( self, resource_group_name, # type: str @@ -59,7 +283,8 @@ def list_by_factory( :param factory_name: The factory name. :type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PipelineListResponse or the result of cls(response) + :return: An iterator like instance of either PipelineListResponse or the result of + cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.PipelineListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ @@ -68,36 +293,33 @@ def list_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", 
self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('PipelineListResponse', pipeline_response) + deserialized = self._deserialize("PipelineListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -115,11 +337,13 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged( get_next, extract_data ) list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines'} # type: 
ignore + @distributed_trace def create_or_update( self, resource_group_name, # type: str @@ -153,35 +377,24 @@ def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(pipeline, 'PipelineResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: 
Optional[str] + + _json = self._serialize.body(pipeline, 'PipelineResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + pipeline_name=pipeline_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -195,8 +408,11 @@ def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore + + @distributed_trace def get( self, resource_group_name, # type: str @@ -227,30 +443,19 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # 
type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + pipeline_name=pipeline_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -266,8 +471,11 @@ def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore + + @distributed_trace def delete( self, resource_group_name, # type: str @@ -294,28 +502,18 @@ def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, 
pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + pipeline_name=pipeline_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -328,6 +526,8 @@ def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore + + @distributed_trace def create_run( self, resource_group_name, # type: str @@ -374,44 +574,30 @@ def create_run( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_run.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 
'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if reference_pipeline_run_id is not None: - query_parameters['referencePipelineRunId'] = self._serialize.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str') - if is_recovery is not None: - query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool') - if start_activity_name is not None: - query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str') - if start_from_failure is not None: - query_parameters['startFromFailure'] = self._serialize.query("start_from_failure", start_from_failure, 'bool') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + if parameters is not None: - body_content = self._serialize.body(parameters, '{object}') + _json = self._serialize.body(parameters, '{object}') else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + _json = None + + request = build_create_run_request( + 
subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + pipeline_name=pipeline_name, + content_type=content_type, + json=_json, + reference_pipeline_run_id=reference_pipeline_run_id, + is_recovery=is_recovery, + start_activity_name=start_activity_name, + start_from_failure=start_from_failure, + template_url=self.create_run.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -425,4 +611,6 @@ def create_run( return cls(pipeline_response, deserialized, {}) return deserialized + create_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py index 4f8b24284394..373322791cf1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py @@ -5,24 +5,68 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat +from msrest import Serializer from .. import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_list_by_factory_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndPointConnections') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class PrivateEndPointConnectionsOperations(object): """PrivateEndPointConnectionsOperations operations. @@ -45,6 +89,7 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def list_by_factory( self, resource_group_name, # type: str @@ -59,8 +104,10 @@ def list_by_factory( :param factory_name: The factory name. 
:type factory_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PrivateEndpointConnectionListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.PrivateEndpointConnectionListResponse] + :return: An iterator like instance of either PrivateEndpointConnectionListResponse or the + result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.PrivateEndpointConnectionListResponse] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionListResponse"] @@ -68,36 +115,33 @@ def list_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = 
build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('PrivateEndpointConnectionListResponse', pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnectionListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -115,6 +159,7 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged( get_next, extract_data ) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py index deb6a5acb95e..8d67498117ba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py @@ -5,23 +5,156 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat +from msrest import Serializer from .. import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Optional, TypeVar - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_create_or_update_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + private_endpoint_connection_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "privateEndpointConnectionName": _SERIALIZER.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + private_endpoint_connection_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "privateEndpointConnectionName": _SERIALIZER.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_delete_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + private_endpoint_connection_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "privateEndpointConnectionName": _SERIALIZER.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class PrivateEndpointConnectionOperations(object): """PrivateEndpointConnectionOperations operations. @@ -44,6 +177,7 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def create_or_update( self, resource_group_name, # type: str @@ -63,7 +197,8 @@ def create_or_update( :param private_endpoint_connection_name: The private endpoint connection name. 
:type private_endpoint_connection_name: str :param private_endpoint_wrapper: - :type private_endpoint_wrapper: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequestResource + :type private_endpoint_wrapper: + ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequestResource :param if_match: ETag of the private endpoint connection entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str @@ -77,35 +212,24 @@ def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] 
= self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(private_endpoint_wrapper, 'PrivateLinkConnectionApprovalRequestResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(private_endpoint_wrapper, 'PrivateLinkConnectionApprovalRequestResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + private_endpoint_connection_name=private_endpoint_connection_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -119,8 +243,11 @@ def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + @distributed_trace def get( self, resource_group_name, # type: str @@ -152,30 +279,19 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': 
self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + private_endpoint_connection_name=private_endpoint_connection_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -189,8 +305,11 @@ def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + @distributed_trace def delete( self, resource_group_name, # type: str @@ -217,28 +336,18 @@ def delete( 401: ClientAuthenticationError, 404: 
ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + private_endpoint_connection_name=private_endpoint_connection_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -250,3 +359,4 @@ def delete( return cls(pipeline_response, None, {}) delete.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py index 125681ace87f..4f7a12a913fd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py @@ -5,23 +5,67 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat +from msrest import Serializer from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Optional, TypeVar - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_get_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateLinkResources') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class PrivateLinkResourcesOperations(object): """PrivateLinkResourcesOperations operations. 
@@ -44,6 +88,7 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def get( self, resource_group_name, # type: str @@ -67,27 +112,17 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -101,4 +136,6 @@ def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateLinkResources'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py index 36b73ca60878..45742e994ab9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py @@ -5,23 +5,151 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat +from msrest import Serializer from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Optional, TypeVar - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_rerun_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + run_id, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "runId": _SERIALIZER.url("run_id", run_id, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + 
method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_cancel_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + run_id, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + "runId": _SERIALIZER.url("run_id", run_id, 'str'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_query_by_factory_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class TriggerRunsOperations(object): """TriggerRunsOperations operations. 
@@ -44,6 +172,7 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def rerun( self, resource_group_name, # type: str @@ -73,29 +202,19 @@ def rerun( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.rerun.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_rerun_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + run_id=run_id, + template_url=self.rerun.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, 
stream=False, **kwargs) response = pipeline_response.http_response @@ -108,6 +227,8 @@ def rerun( rerun.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore + + @distributed_trace def cancel( self, resource_group_name, # type: str @@ -137,29 +258,19 @@ def cancel( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.cancel.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_cancel_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + run_id=run_id, + template_url=self.cancel.metadata['url'], + ) + request = _convert_request(request) 
+ request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -172,6 +283,8 @@ def cancel( cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore + + @distributed_trace def query_by_factory( self, resource_group_name, # type: str @@ -198,32 +311,22 @@ def query_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(filter_parameters, 'RunFilterParameters') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 
self._serialize.header("accept", accept, 'str') + request = build_query_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.query_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -237,4 +340,6 @@ def query_by_factory( return cls(pipeline_response, deserialized, {}) return deserialized + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns'} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py index cbc17f0eb3e8..f010fbfe2767 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py @@ -5,26 +5,425 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling +from msrest import Serializer from .. import models as _models +from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_list_by_factory_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_query_by_factory_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + content_type = kwargs.pop('content_type', None) # type: Optional[str] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if content_type is not None: + header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_delete_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_subscribe_to_events_request_initial( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_get_event_subscription_status_request( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_unsubscribe_from_events_request_initial( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_start_request_initial( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + + +def build_stop_request_initial( + subscription_id, # type: str + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + api_version = "2018-06-01" + accept = "application/json" + # Construct URL + url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop') + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "triggerName": _SERIALIZER.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + url = _format_url_section(url, **path_format_arguments) + + # Construct parameters + query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=url, + params=query_parameters, + headers=header_parameters, + **kwargs + ) + +# fmt: on class TriggersOperations(object): """TriggersOperations operations. 
@@ -47,6 +446,7 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + @distributed_trace def list_by_factory( self, resource_group_name, # type: str @@ -70,36 +470,33 @@ def list_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_factory_request( + 
subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('TriggerListResponse', pipeline_response) + deserialized = self._deserialize("TriggerListResponse", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -117,11 +514,13 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged( get_next, extract_data ) list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} # type: ignore + @distributed_trace def query_by_factory( self, resource_group_name, # type: str @@ -148,32 +547,22 @@ def query_by_factory( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type 
= kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(filter_parameters, 'TriggerFilterParameters') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + request = build_query_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + content_type=content_type, + json=_json, + template_url=self.query_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -187,8 +576,11 @@ def query_by_factory( return cls(pipeline_response, deserialized, {}) return deserialized + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'} # type: ignore + + @distributed_trace def create_or_update( self, resource_group_name, # type: str @@ -222,35 +614,24 @@ def create_or_update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': 
self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(trigger, 'TriggerResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(trigger, 'TriggerResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + content_type=content_type, + json=_json, + if_match=if_match, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + pipeline_response = 
self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -264,8 +645,11 @@ def create_or_update( return cls(pipeline_response, deserialized, {}) return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore + + @distributed_trace def get( self, resource_group_name, # type: str @@ -296,30 +680,19 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_request( + subscription_id=self._config.subscription_id, + 
resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + if_none_match=if_none_match, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -335,8 +708,11 @@ def get( return cls(pipeline_response, deserialized, {}) return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore + + @distributed_trace def delete( self, resource_group_name, # type: str @@ -363,28 +739,18 @@ def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: 
Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -397,6 +763,7 @@ def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore + def _subscribe_to_events_initial( self, resource_group_name, # type: str @@ -410,28 +777,18 @@ def _subscribe_to_events_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self._subscribe_to_events_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - 
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_subscribe_to_events_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + template_url=self._subscribe_to_events_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -447,8 +804,11 @@ def _subscribe_to_events_initial( return cls(pipeline_response, deserialized, {}) return deserialized + _subscribe_to_events_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore + + @distributed_trace def begin_subscribe_to_events( self, resource_group_name, # type: str @@ -467,15 +827,19 @@ def begin_subscribe_to_events( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
:paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.TriggerSubscriptionOperationStatus"] lro_delay = kwargs.pop( 'polling_interval', @@ -490,25 +854,17 @@ def begin_subscribe_to_events( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, 
min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -520,8 +876,10 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_subscribe_to_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore + @distributed_trace def get_event_subscription_status( self, resource_group_name, # type: str @@ -548,28 +906,18 @@ def get_event_subscription_status( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self.get_event_subscription_status.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct 
parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_get_event_subscription_status_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + template_url=self.get_event_subscription_status.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -583,8 +931,10 @@ def get_event_subscription_status( return cls(pipeline_response, deserialized, {}) return deserialized + get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} # type: ignore + def _unsubscribe_from_events_initial( self, resource_group_name, # type: str @@ -598,28 +948,18 @@ def _unsubscribe_from_events_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self._unsubscribe_from_events_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': 
self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_unsubscribe_from_events_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + template_url=self._unsubscribe_from_events_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -635,8 +975,11 @@ def _unsubscribe_from_events_initial( return cls(pipeline_response, deserialized, {}) return deserialized + _unsubscribe_from_events_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore + + @distributed_trace def begin_unsubscribe_from_events( self, resource_group_name, # type: str @@ -655,15 +998,19 @@ def begin_unsubscribe_from_events( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: By default, your polling method will be ARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.TriggerSubscriptionOperationStatus"] lro_delay = kwargs.pop( 'polling_interval', @@ -678,25 +1025,17 @@ def begin_unsubscribe_from_events( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): + response = pipeline_response.http_response deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -708,6 +1047,7 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, 
raw_result, get_long_running_output, polling_method) + begin_unsubscribe_from_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore def _start_initial( @@ -723,28 +1063,18 @@ def _start_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self._start_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_start_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + template_url=self._start_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, 
header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -757,6 +1087,8 @@ def _start_initial( _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore + + @distributed_trace def begin_start( self, resource_group_name, # type: str @@ -775,15 +1107,17 @@ def begin_start( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', @@ -798,22 +1132,14 @@ def begin_start( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -825,6 +1151,7 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore def 
_stop_initial( @@ -840,28 +1167,18 @@ def _stop_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - accept = "application/json" - - # Construct URL - url = self._stop_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_stop_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + template_url=self._stop_initial.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -874,6 +1191,8 @@ def _stop_initial( _stop_initial.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore + + @distributed_trace def begin_stop( self, resource_group_name, # type: str @@ -892,15 +1211,17 @@ def begin_stop( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', @@ -915,22 +1236,14 @@ def begin_stop( cls=lambda x,y,z: x, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -942,4 +1255,5 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore