diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index f8279c1a99bd..f08086a1e9f9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -96,6 +96,7 @@ from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger from .azure_function_linked_service_py3 import AzureFunctionLinkedService from .azure_data_explorer_linked_service_py3 import AzureDataExplorerLinkedService + from .sap_table_linked_service_py3 import SapTableLinkedService from .google_ad_words_linked_service_py3 import GoogleAdWordsLinkedService from .oracle_service_cloud_linked_service_py3 import OracleServiceCloudLinkedService from .dynamics_ax_linked_service_py3 import DynamicsAXLinkedService @@ -230,9 +231,11 @@ from .http_dataset_py3 import HttpDataset from .azure_search_index_dataset_py3 import AzureSearchIndexDataset from .web_table_dataset_py3 import WebTableDataset + from .sap_table_resource_dataset_py3 import SapTableResourceDataset from .rest_resource_dataset_py3 import RestResourceDataset from .sql_server_table_dataset_py3 import SqlServerTableDataset from .sap_open_hub_table_dataset_py3 import SapOpenHubTableDataset + from .sap_hana_table_dataset_py3 import SapHanaTableDataset from .sap_ecc_resource_dataset_py3 import SapEccResourceDataset from .sap_cloud_for_customer_resource_dataset_py3 import SapCloudForCustomerResourceDataset from .salesforce_object_dataset_py3 import SalesforceObjectDataset @@ -255,6 +258,18 @@ from .azure_sql_table_dataset_py3 import AzureSqlTableDataset from .azure_table_dataset_py3 import AzureTableDataset from .azure_blob_dataset_py3 import AzureBlobDataset + from .hdfs_location_py3 import HdfsLocation + from .http_server_location_py3 import HttpServerLocation + from .sftp_location_py3 import SftpLocation + from .ftp_server_location_py3 import FtpServerLocation + from .file_server_location_py3 import FileServerLocation + from .amazon_s3_location_py3 import AmazonS3Location + from .azure_data_lake_store_location_py3 import AzureDataLakeStoreLocation + from .azure_blob_fs_location_py3 import AzureBlobFSLocation + from .azure_blob_storage_location_py3 import AzureBlobStorageLocation + from .dataset_location_py3 import DatasetLocation + from .delimited_text_dataset_py3 import DelimitedTextDataset + from .parquet_dataset_py3 import ParquetDataset from .amazon_s3_dataset_py3 import AmazonS3Dataset from .activity_policy_py3 import ActivityPolicy from .azure_function_activity_py3 import AzureFunctionActivity @@ -313,6 +328,7 @@ from .mongo_db_source_py3 import MongoDbSource from .cassandra_source_py3 import CassandraSource from .web_source_py3 import WebSource + from .oracle_partition_settings_py3 import OraclePartitionSettings from .oracle_source_py3 import OracleSource from .azure_data_explorer_source_py3 import AzureDataExplorerSource from .azure_my_sql_source_py3 import AzureMySqlSource @@ -321,9 +337,14 @@ from .file_system_source_py3 import FileSystemSource from .sql_dw_source_py3 import SqlDWSource from .stored_procedure_parameter_py3 import StoredProcedureParameter + from .azure_sql_source_py3 import AzureSqlSource + from .sql_server_source_py3 import SqlServerSource from .sql_source_py3 import SqlSource from .rest_source_py3 import RestSource + from .sap_table_partition_settings_py3 import SapTablePartitionSettings + from 
.sap_table_source_py3 import SapTableSource from .sap_open_hub_source_py3 import SapOpenHubSource + from .sap_hana_source_py3 import SapHanaSource from .sap_ecc_source_py3 import SapEccSource from .sap_cloud_for_customer_source_py3 import SapCloudForCustomerSource from .salesforce_source_py3 import SalesforceSource @@ -332,6 +353,20 @@ from .document_db_collection_source_py3 import DocumentDbCollectionSource from .blob_source_py3 import BlobSource from .azure_table_source_py3 import AzureTableSource + from .format_read_setting_py3 import FormatReadSetting + from .delimited_text_read_setting_py3 import DelimitedTextReadSetting + from .hdfs_read_setting_py3 import HdfsReadSetting + from .http_read_setting_py3 import HttpReadSetting + from .sftp_read_setting_py3 import SftpReadSetting + from .ftp_read_setting_py3 import FtpReadSetting + from .file_server_read_setting_py3 import FileServerReadSetting + from .amazon_s3_read_setting_py3 import AmazonS3ReadSetting + from .azure_data_lake_store_read_setting_py3 import AzureDataLakeStoreReadSetting + from .azure_blob_fs_read_setting_py3 import AzureBlobFSReadSetting + from .azure_blob_storage_read_setting_py3 import AzureBlobStorageReadSetting + from .connector_read_setting_py3 import ConnectorReadSetting + from .delimited_text_source_py3 import DelimitedTextSource + from .parquet_source_py3 import ParquetSource from .copy_source_py3 import CopySource from .lookup_activity_py3 import LookupActivity from .log_storage_settings_py3 import LogStorageSettings @@ -351,8 +386,6 @@ from .hd_insight_hive_activity_py3 import HDInsightHiveActivity from .redirect_incompatible_row_settings_py3 import RedirectIncompatibleRowSettings from .staging_settings_py3 import StagingSettings - from .tabular_translator_py3 import TabularTranslator - from .copy_translator_py3 import CopyTranslator from .cosmos_db_mongo_db_api_sink_py3 import CosmosDbMongoDbApiSink from .salesforce_sink_py3 import SalesforceSink from .azure_data_explorer_sink_py3 import AzureDataExplorerSink @@ -364,13 +397,24 @@ from .oracle_sink_py3 import OracleSink from .polybase_settings_py3 import PolybaseSettings from .sql_dw_sink_py3 import SqlDWSink + from .azure_sql_sink_py3 import AzureSqlSink + from .sql_server_sink_py3 import SqlServerSink from .sql_sink_py3 import SqlSink from .document_db_collection_sink_py3 import DocumentDbCollectionSink from .file_system_sink_py3 import FileSystemSink from .blob_sink_py3 import BlobSink + from .file_server_write_setting_py3 import FileServerWriteSetting + from .azure_data_lake_store_write_setting_py3 import AzureDataLakeStoreWriteSetting + from .azure_blob_fs_write_setting_py3 import AzureBlobFSWriteSetting + from .azure_blob_storage_write_setting_py3 import AzureBlobStorageWriteSetting + from .connector_write_setting_py3 import ConnectorWriteSetting + from .parquet_sink_py3 import ParquetSink from .azure_table_sink_py3 import AzureTableSink from .azure_queue_sink_py3 import AzureQueueSink from .sap_cloud_for_customer_sink_py3 import SapCloudForCustomerSink + from .format_write_setting_py3 import FormatWriteSetting + from .delimited_text_write_setting_py3 import DelimitedTextWriteSetting + from .delimited_text_sink_py3 import DelimitedTextSink from .copy_sink_py3 import CopySink from .copy_activity_py3 import CopyActivity from .execution_activity_py3 import ExecutionActivity @@ -396,6 +440,8 @@ from .linked_integration_runtime_key_authorization_py3 import LinkedIntegrationRuntimeKeyAuthorization from .linked_integration_runtime_type_py3 import 
LinkedIntegrationRuntimeType from .self_hosted_integration_runtime_py3 import SelfHostedIntegrationRuntime + from .entity_reference_py3 import EntityReference + from .integration_runtime_data_proxy_properties_py3 import IntegrationRuntimeDataProxyProperties from .integration_runtime_custom_setup_script_properties_py3 import IntegrationRuntimeCustomSetupScriptProperties from .integration_runtime_ssis_catalog_info_py3 import IntegrationRuntimeSsisCatalogInfo from .integration_runtime_ssis_properties_py3 import IntegrationRuntimeSsisProperties @@ -504,6 +550,7 @@ from .multiple_pipeline_trigger import MultiplePipelineTrigger from .azure_function_linked_service import AzureFunctionLinkedService from .azure_data_explorer_linked_service import AzureDataExplorerLinkedService + from .sap_table_linked_service import SapTableLinkedService from .google_ad_words_linked_service import GoogleAdWordsLinkedService from .oracle_service_cloud_linked_service import OracleServiceCloudLinkedService from .dynamics_ax_linked_service import DynamicsAXLinkedService @@ -638,9 +685,11 @@ from .http_dataset import HttpDataset from .azure_search_index_dataset import AzureSearchIndexDataset from .web_table_dataset import WebTableDataset + from .sap_table_resource_dataset import SapTableResourceDataset from .rest_resource_dataset import RestResourceDataset from .sql_server_table_dataset import SqlServerTableDataset from .sap_open_hub_table_dataset import SapOpenHubTableDataset + from .sap_hana_table_dataset import SapHanaTableDataset from .sap_ecc_resource_dataset import SapEccResourceDataset from .sap_cloud_for_customer_resource_dataset import SapCloudForCustomerResourceDataset from .salesforce_object_dataset import SalesforceObjectDataset @@ -663,6 +712,18 @@ from .azure_sql_table_dataset import AzureSqlTableDataset from .azure_table_dataset import AzureTableDataset from .azure_blob_dataset import AzureBlobDataset + from .hdfs_location import HdfsLocation + from .http_server_location import HttpServerLocation + from .sftp_location import SftpLocation + from .ftp_server_location import FtpServerLocation + from .file_server_location import FileServerLocation + from .amazon_s3_location import AmazonS3Location + from .azure_data_lake_store_location import AzureDataLakeStoreLocation + from .azure_blob_fs_location import AzureBlobFSLocation + from .azure_blob_storage_location import AzureBlobStorageLocation + from .dataset_location import DatasetLocation + from .delimited_text_dataset import DelimitedTextDataset + from .parquet_dataset import ParquetDataset from .amazon_s3_dataset import AmazonS3Dataset from .activity_policy import ActivityPolicy from .azure_function_activity import AzureFunctionActivity @@ -721,6 +782,7 @@ from .mongo_db_source import MongoDbSource from .cassandra_source import CassandraSource from .web_source import WebSource + from .oracle_partition_settings import OraclePartitionSettings from .oracle_source import OracleSource from .azure_data_explorer_source import AzureDataExplorerSource from .azure_my_sql_source import AzureMySqlSource @@ -729,9 +791,14 @@ from .file_system_source import FileSystemSource from .sql_dw_source import SqlDWSource from .stored_procedure_parameter import StoredProcedureParameter + from .azure_sql_source import AzureSqlSource + from .sql_server_source import SqlServerSource from .sql_source import SqlSource from .rest_source import RestSource + from .sap_table_partition_settings import SapTablePartitionSettings + from .sap_table_source import SapTableSource from 
.sap_open_hub_source import SapOpenHubSource + from .sap_hana_source import SapHanaSource from .sap_ecc_source import SapEccSource from .sap_cloud_for_customer_source import SapCloudForCustomerSource from .salesforce_source import SalesforceSource @@ -740,6 +807,20 @@ from .document_db_collection_source import DocumentDbCollectionSource from .blob_source import BlobSource from .azure_table_source import AzureTableSource + from .format_read_setting import FormatReadSetting + from .delimited_text_read_setting import DelimitedTextReadSetting + from .hdfs_read_setting import HdfsReadSetting + from .http_read_setting import HttpReadSetting + from .sftp_read_setting import SftpReadSetting + from .ftp_read_setting import FtpReadSetting + from .file_server_read_setting import FileServerReadSetting + from .amazon_s3_read_setting import AmazonS3ReadSetting + from .azure_data_lake_store_read_setting import AzureDataLakeStoreReadSetting + from .azure_blob_fs_read_setting import AzureBlobFSReadSetting + from .azure_blob_storage_read_setting import AzureBlobStorageReadSetting + from .connector_read_setting import ConnectorReadSetting + from .delimited_text_source import DelimitedTextSource + from .parquet_source import ParquetSource from .copy_source import CopySource from .lookup_activity import LookupActivity from .log_storage_settings import LogStorageSettings @@ -759,8 +840,6 @@ from .hd_insight_hive_activity import HDInsightHiveActivity from .redirect_incompatible_row_settings import RedirectIncompatibleRowSettings from .staging_settings import StagingSettings - from .tabular_translator import TabularTranslator - from .copy_translator import CopyTranslator from .cosmos_db_mongo_db_api_sink import CosmosDbMongoDbApiSink from .salesforce_sink import SalesforceSink from .azure_data_explorer_sink import AzureDataExplorerSink @@ -772,13 +851,24 @@ from .oracle_sink import OracleSink from .polybase_settings import PolybaseSettings from .sql_dw_sink import SqlDWSink + from .azure_sql_sink import AzureSqlSink + from .sql_server_sink import SqlServerSink from .sql_sink import SqlSink from .document_db_collection_sink import DocumentDbCollectionSink from .file_system_sink import FileSystemSink from .blob_sink import BlobSink + from .file_server_write_setting import FileServerWriteSetting + from .azure_data_lake_store_write_setting import AzureDataLakeStoreWriteSetting + from .azure_blob_fs_write_setting import AzureBlobFSWriteSetting + from .azure_blob_storage_write_setting import AzureBlobStorageWriteSetting + from .connector_write_setting import ConnectorWriteSetting + from .parquet_sink import ParquetSink from .azure_table_sink import AzureTableSink from .azure_queue_sink import AzureQueueSink from .sap_cloud_for_customer_sink import SapCloudForCustomerSink + from .format_write_setting import FormatWriteSetting + from .delimited_text_write_setting import DelimitedTextWriteSetting + from .delimited_text_sink import DelimitedTextSink from .copy_sink import CopySink from .copy_activity import CopyActivity from .execution_activity import ExecutionActivity @@ -804,6 +894,8 @@ from .linked_integration_runtime_key_authorization import LinkedIntegrationRuntimeKeyAuthorization from .linked_integration_runtime_type import LinkedIntegrationRuntimeType from .self_hosted_integration_runtime import SelfHostedIntegrationRuntime + from .entity_reference import EntityReference + from .integration_runtime_data_proxy_properties import IntegrationRuntimeDataProxyProperties from 
.integration_runtime_custom_setup_script_properties import IntegrationRuntimeCustomSetupScriptProperties from .integration_runtime_ssis_catalog_info import IntegrationRuntimeSsisCatalogInfo from .integration_runtime_ssis_properties import IntegrationRuntimeSsisProperties @@ -874,25 +966,18 @@ TeradataAuthenticationType, Db2AuthenticationType, SybaseAuthenticationType, - DatasetCompressionLevel, - JsonFormatFilePattern, AzureFunctionActivityMethod, WebActivityMethod, - CassandraSourceReadConsistencyLevels, StoredProcedureParameterType, - SalesforceSourceReadBehavior, HDInsightActivityDebugInfoOption, - SalesforceSinkWriteBehavior, - AzureSearchIndexWriteBehaviorType, - CopyBehaviorType, PolybaseSettingsRejectType, - SapCloudForCustomerSinkWriteBehavior, WebHookActivityMethod, IntegrationRuntimeType, SelfHostedIntegrationRuntimeNodeStatus, IntegrationRuntimeUpdateResult, IntegrationRuntimeInternalChannelEncryptionMode, ManagedIntegrationRuntimeNodeStatus, + IntegrationRuntimeEntityReferenceType, IntegrationRuntimeSsisCatalogPricingTier, IntegrationRuntimeLicenseType, IntegrationRuntimeEdition, @@ -987,6 +1072,7 @@ 'MultiplePipelineTrigger', 'AzureFunctionLinkedService', 'AzureDataExplorerLinkedService', + 'SapTableLinkedService', 'GoogleAdWordsLinkedService', 'OracleServiceCloudLinkedService', 'DynamicsAXLinkedService', @@ -1121,9 +1207,11 @@ 'HttpDataset', 'AzureSearchIndexDataset', 'WebTableDataset', + 'SapTableResourceDataset', 'RestResourceDataset', 'SqlServerTableDataset', 'SapOpenHubTableDataset', + 'SapHanaTableDataset', 'SapEccResourceDataset', 'SapCloudForCustomerResourceDataset', 'SalesforceObjectDataset', @@ -1146,6 +1234,18 @@ 'AzureSqlTableDataset', 'AzureTableDataset', 'AzureBlobDataset', + 'HdfsLocation', + 'HttpServerLocation', + 'SftpLocation', + 'FtpServerLocation', + 'FileServerLocation', + 'AmazonS3Location', + 'AzureDataLakeStoreLocation', + 'AzureBlobFSLocation', + 'AzureBlobStorageLocation', + 'DatasetLocation', + 'DelimitedTextDataset', + 'ParquetDataset', 'AmazonS3Dataset', 'ActivityPolicy', 'AzureFunctionActivity', @@ -1204,6 +1304,7 @@ 'MongoDbSource', 'CassandraSource', 'WebSource', + 'OraclePartitionSettings', 'OracleSource', 'AzureDataExplorerSource', 'AzureMySqlSource', @@ -1212,9 +1313,14 @@ 'FileSystemSource', 'SqlDWSource', 'StoredProcedureParameter', + 'AzureSqlSource', + 'SqlServerSource', 'SqlSource', 'RestSource', + 'SapTablePartitionSettings', + 'SapTableSource', 'SapOpenHubSource', + 'SapHanaSource', 'SapEccSource', 'SapCloudForCustomerSource', 'SalesforceSource', @@ -1223,6 +1329,20 @@ 'DocumentDbCollectionSource', 'BlobSource', 'AzureTableSource', + 'FormatReadSetting', + 'DelimitedTextReadSetting', + 'HdfsReadSetting', + 'HttpReadSetting', + 'SftpReadSetting', + 'FtpReadSetting', + 'FileServerReadSetting', + 'AmazonS3ReadSetting', + 'AzureDataLakeStoreReadSetting', + 'AzureBlobFSReadSetting', + 'AzureBlobStorageReadSetting', + 'ConnectorReadSetting', + 'DelimitedTextSource', + 'ParquetSource', 'CopySource', 'LookupActivity', 'LogStorageSettings', @@ -1242,8 +1362,6 @@ 'HDInsightHiveActivity', 'RedirectIncompatibleRowSettings', 'StagingSettings', - 'TabularTranslator', - 'CopyTranslator', 'CosmosDbMongoDbApiSink', 'SalesforceSink', 'AzureDataExplorerSink', @@ -1255,13 +1373,24 @@ 'OracleSink', 'PolybaseSettings', 'SqlDWSink', + 'AzureSqlSink', + 'SqlServerSink', 'SqlSink', 'DocumentDbCollectionSink', 'FileSystemSink', 'BlobSink', + 'FileServerWriteSetting', + 'AzureDataLakeStoreWriteSetting', + 'AzureBlobFSWriteSetting', + 
'AzureBlobStorageWriteSetting', + 'ConnectorWriteSetting', + 'ParquetSink', 'AzureTableSink', 'AzureQueueSink', 'SapCloudForCustomerSink', + 'FormatWriteSetting', + 'DelimitedTextWriteSetting', + 'DelimitedTextSink', 'CopySink', 'CopyActivity', 'ExecutionActivity', @@ -1287,6 +1416,8 @@ 'LinkedIntegrationRuntimeKeyAuthorization', 'LinkedIntegrationRuntimeType', 'SelfHostedIntegrationRuntime', + 'EntityReference', + 'IntegrationRuntimeDataProxyProperties', 'IntegrationRuntimeCustomSetupScriptProperties', 'IntegrationRuntimeSsisCatalogInfo', 'IntegrationRuntimeSsisProperties', @@ -1356,25 +1487,18 @@ 'TeradataAuthenticationType', 'Db2AuthenticationType', 'SybaseAuthenticationType', - 'DatasetCompressionLevel', - 'JsonFormatFilePattern', 'AzureFunctionActivityMethod', 'WebActivityMethod', - 'CassandraSourceReadConsistencyLevels', 'StoredProcedureParameterType', - 'SalesforceSourceReadBehavior', 'HDInsightActivityDebugInfoOption', - 'SalesforceSinkWriteBehavior', - 'AzureSearchIndexWriteBehaviorType', - 'CopyBehaviorType', 'PolybaseSettingsRejectType', - 'SapCloudForCustomerSinkWriteBehavior', 'WebHookActivityMethod', 'IntegrationRuntimeType', 'SelfHostedIntegrationRuntimeNodeStatus', 'IntegrationRuntimeUpdateResult', 'IntegrationRuntimeInternalChannelEncryptionMode', 'ManagedIntegrationRuntimeNodeStatus', + 'IntegrationRuntimeEntityReferenceType', 'IntegrationRuntimeSsisCatalogPricingTier', 'IntegrationRuntimeLicenseType', 'IntegrationRuntimeEdition', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py new file mode 100644 index 000000000000..74c77a16f0f2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py @@ -0,0 +1,55 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AmazonS3Location(DatasetLocation): + """The location of amazon S3 dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param bucket_name: Specify the bucketName of amazon S3. Type: string (or + Expression with resultType string) + :type bucket_name: object + :param version: Specify the version of amazon S3. Type: string (or + Expression with resultType string). 
+ :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonS3Location, self).__init__(**kwargs) + self.bucket_name = kwargs.get('bucket_name', None) + self.version = kwargs.get('version', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py new file mode 100644 index 000000000000..36afce341ada --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py @@ -0,0 +1,55 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AmazonS3Location(DatasetLocation): + """The location of amazon S3 dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param bucket_name: Specify the bucketName of amazon S3. Type: string (or + Expression with resultType string) + :type bucket_name: object + :param version: Specify the version of amazon S3. Type: string (or + Expression with resultType string). 
+ :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None, **kwargs) -> None: + super(AmazonS3Location, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.bucket_name = bucket_name + self.version = version diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py new file mode 100644 index 000000000000..4de7e0ebb7b9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class AmazonS3ReadSetting(ConnectorReadSetting): + """Azure data lake store read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the S3 object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonS3ReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.prefix = kwargs.get('prefix', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py new file mode 100644 index 000000000000..deda331ea561 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class AmazonS3ReadSetting(ConnectorReadSetting): + """Azure data lake store read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the S3 object name. Type: string (or + Expression with resultType string). 
+ :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AmazonS3ReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py new file mode 100644 index 000000000000..c21525bbac4c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AzureBlobFSLocation(DatasetLocation): + """The location of azure blobFS dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. 
Type: string (or + Expression with resultType string). + :type file_name: object + :param file_system: Specify the fileSystem of azure blobFS. Type: string + (or Expression with resultType string). + :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'file_system': {'key': 'fileSystem', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSLocation, self).__init__(**kwargs) + self.file_system = kwargs.get('file_system', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py new file mode 100644 index 000000000000..afbae52fdeb0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AzureBlobFSLocation(DatasetLocation): + """The location of azure blobFS dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param file_system: Specify the fileSystem of azure blobFS. Type: string + (or Expression with resultType string). 
+ :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'file_system': {'key': 'fileSystem', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, file_system=None, **kwargs) -> None: + super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.file_system = file_system diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting.py new file mode 100644 index 000000000000..11490a288417 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class AzureBlobFSReadSetting(ConnectorReadSetting): + """Azure blobFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting_py3.py new file mode 100644 index 000000000000..28f3b4f7ceb4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class AzureBlobFSReadSetting(ConnectorReadSetting): + """Azure blobFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. 
+ Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureBlobFSReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py index 59e070c64fe8..a47b173c6581 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py @@ -40,10 +40,8 @@ class AzureBlobFSSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object """ _validation = { @@ -58,7 +56,7 @@ class AzureBlobFSSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py index 35ad6a97dbfe..e2b28bf30a8c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py @@ -40,10 +40,8 @@ class AzureBlobFSSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object """ _validation = { @@ -58,7 +56,7 @@ class AzureBlobFSSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting.py new file mode 100644 index 000000000000..d5b2d850da58 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting import ConnectorWriteSetting + + +class AzureBlobFSWriteSetting(ConnectorWriteSetting): + """Azure blobFS write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSWriteSetting, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting_py3.py new file mode 100644 index 000000000000..62196ff73838 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting_py3 import ConnectorWriteSetting + + +class AzureBlobFSWriteSetting(ConnectorWriteSetting): + """Azure blobFS write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureBlobFSWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py new file mode 100644 index 000000000000..1efbbeaec352 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or + Expression with resultType string). + :type container: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageLocation, self).__init__(**kwargs) + self.container = kwargs.get('container', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py new file mode 100644 index 000000000000..63b122573039 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or + Expression with resultType string). 
+ :type container: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, container=None, **kwargs) -> None: + super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.container = container diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py new file mode 100644 index 000000000000..ee07a3576f29 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class AzureBlobStorageReadSetting(ConnectorReadSetting): + """Azure blob read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting_py3.py new file mode 100644 index 000000000000..3e3d35774a46 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class AzureBlobStorageReadSetting(ConnectorReadSetting): + """Azure blob read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. 
+ Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureBlobStorageReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting.py new file mode 100644 index 000000000000..a6499dfda798 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting import ConnectorWriteSetting + + +class AzureBlobStorageWriteSetting(ConnectorWriteSetting): + """Azure blob write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
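A similar sketch for AzureBlobStorageReadSetting, showing the wildcard and partition-discovery options it adds on top of ConnectorReadSetting (import path and type string are assumptions; values are illustrative).

# Hypothetical example: read *.csv files recursively under a wildcard folder.
from azure.mgmt.datafactory.models import AzureBlobStorageReadSetting

read_settings = AzureBlobStorageReadSetting(
    type='AzureBlobStorageReadSetting',    # required; assumed discriminator value
    recursive=True,                        # walk sub-folders
    wildcard_folder_path='landing/2019/*',
    wildcard_file_name='*.csv',
    enable_partition_discovery=False,
)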
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageWriteSetting, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting_py3.py new file mode 100644 index 000000000000..9abb68c06055 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting_py3 import ConnectorWriteSetting + + +class AzureBlobStorageWriteSetting(ConnectorWriteSetting): + """Azure blob write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureBlobStorageWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py new file mode 100644 index 000000000000..a4bf521a2005 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AzureDataLakeStoreLocation(DatasetLocation): + """The location of azure data lake store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py new file mode 100644 index 000000000000..e7955731fc31 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AzureDataLakeStoreLocation(DatasetLocation): + """The location of azure data lake store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting.py new file mode 100644 index 000000000000..0f0dfe7f7c58 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class AzureDataLakeStoreReadSetting(ConnectorReadSetting): + """Azure data lake store read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: ADLS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting_py3.py new file mode 100644 index 000000000000..b9159463d681 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class AzureDataLakeStoreReadSetting(ConnectorReadSetting): + """Azure data lake store read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: ADLS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. 
+ Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureDataLakeStoreReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py index 145c7c61358a..e882698c2ca6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py @@ -40,10 +40,10 @@ class AzureDataLakeStoreSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param enable_adls_single_file_parallel: Single File Parallel. 
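AzureDataLakeStoreReadSetting mirrors the blob read settings; a sketch filtering by the modified-datetime window (type string, import path, and the ISO-8601 literals are assumptions).

# Hypothetical example: only pick up files modified during January 2019.
from azure.mgmt.datafactory.models import AzureDataLakeStoreReadSetting

adls_read = AzureDataLakeStoreReadSetting(
    type='AzureDataLakeStoreReadSetting',            # required; assumed discriminator value
    recursive=True,
    modified_datetime_start='2019-01-01T00:00:00Z',  # assumed ISO-8601 format
    modified_datetime_end='2019-02-01T00:00:00Z',
)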
+ :type enable_adls_single_file_parallel: object """ _validation = { @@ -58,10 +58,12 @@ class AzureDataLakeStoreSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } def __init__(self, **kwargs): super(AzureDataLakeStoreSink, self).__init__(**kwargs) self.copy_behavior = kwargs.get('copy_behavior', None) + self.enable_adls_single_file_parallel = kwargs.get('enable_adls_single_file_parallel', None) self.type = 'AzureDataLakeStoreSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py index d3e16339fef2..0f96cea725e2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py @@ -40,10 +40,10 @@ class AzureDataLakeStoreSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param enable_adls_single_file_parallel: Single File Parallel. 
+ :type enable_adls_single_file_parallel: object """ _validation = { @@ -58,10 +58,12 @@ class AzureDataLakeStoreSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, enable_adls_single_file_parallel=None, **kwargs) -> None: super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.copy_behavior = copy_behavior + self.enable_adls_single_file_parallel = enable_adls_single_file_parallel self.type = 'AzureDataLakeStoreSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting.py new file mode 100644 index 000000000000..d7875f545e77 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting import ConnectorWriteSetting + + +class AzureDataLakeStoreWriteSetting(ConnectorWriteSetting): + """Azure data lake store write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
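The sink change above loosens copy_behavior from the CopyBehaviorType enum to a free-form object and adds enable_adls_single_file_parallel; a sketch with illustrative values (the former enum string remains one valid choice):

# Hypothetical example: ADLS sink; 'type' is filled in by the constructor.
from azure.mgmt.datafactory.models import AzureDataLakeStoreSink

adls_sink = AzureDataLakeStoreSink(
    copy_behavior='FlattenHierarchy',        # previously an enum, now any value/expression
    enable_adls_single_file_parallel=False,  # new switch surfaced by this change
    max_concurrent_connections=4,
)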
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreWriteSetting, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting_py3.py new file mode 100644 index 000000000000..e05ddcbaeaac --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting_py3 import ConnectorWriteSetting + + +class AzureDataLakeStoreWriteSetting(ConnectorWriteSetting): + """Azure data lake store write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureDataLakeStoreWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py index 9aae64af8da0..af2505be7a5c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py @@ -41,9 +41,8 @@ class AzureSearchIndexSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specify the write behavior when upserting documents - into Azure Search Index. 
Possible values include: 'Merge', 'Upload' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType + into Azure Search Index. + :type write_behavior: object """ _validation = { @@ -58,7 +57,7 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py index 3cd887a2512c..9e57f2f1feb3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py @@ -41,9 +41,8 @@ class AzureSearchIndexSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specify the write behavior when upserting documents - into Azure Search Index. Possible values include: 'Merge', 'Upload' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType + into Azure Search Index. + :type write_behavior: object """ _validation = { @@ -58,7 +57,7 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py index e12464efdd49..0921505515d5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py @@ -43,15 +43,14 @@ class AzureSqlDWTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the Azure SQL Data - Warehouse. Type: string (or Expression with resultType string). + :param table_name: The table name of the Azure SQL Data Warehouse. Type: + string (or Expression with resultType string). 
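AzureSearchIndexSink gets the same loosening for write_behavior (enum string to object); a short illustrative sketch:

# Hypothetical example: upsert documents into an Azure Search index.
from azure.mgmt.datafactory.models import AzureSearchIndexSink

search_sink = AzureSearchIndexSink(
    write_behavior='Upload',   # former enum value, still accepted as a plain string
    write_batch_size=1000,     # illustrative batch size
)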
:type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py index a06073f86c5b..0be72998fc64 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py @@ -43,15 +43,14 @@ class AzureSqlDWTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the Azure SQL Data - Warehouse. Type: string (or Expression with resultType string). + :param table_name: The table name of the Azure SQL Data Warehouse. Type: + string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -67,7 +66,7 @@ class AzureSqlDWTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'AzureSqlDWTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py new file mode 100644 index 000000000000..441bf0c4279f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. 
Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.type = 'AzureSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py new file mode 100644 index 000000000000..6aa431ae57d6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py @@ -0,0 
+1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). 
+ :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: + super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.type = 'AzureSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py new file mode 100644 index 000000000000..b6c62f9a3164 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureSqlSource(CopySource): + """A copy activity Azure SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
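A sketch for the new AzureSqlSink model (procedure and table-type names are illustrative; import path assumed):

# Hypothetical example: Azure SQL sink with a pre-copy script and writer stored procedure.
from azure.mgmt.datafactory.models import AzureSqlSink

sql_sink = AzureSqlSink(
    pre_copy_script='TRUNCATE TABLE dbo.Staging',         # illustrative pre-copy script
    sql_writer_stored_procedure_name='dbo.spUpsertRows',  # illustrative procedure name
    sql_writer_table_type='StagingTableType',             # illustrative table type
)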
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.type = 'AzureSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py new file mode 100644 index 000000000000..cb5c33d28bb2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureSqlSource(CopySource): + """A copy activity Azure SQL source. + + All required parameters must be populated in order to send to Azure. 
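And the matching AzureSqlSource; note the docstring's caveat that sql_reader_query and sql_reader_stored_procedure_name cannot be used together (query text is illustrative; import path assumed):

# Hypothetical example: Azure SQL source driven by an inline query.
from azure.mgmt.datafactory.models import AzureSqlSource

sql_source = AzureSqlSource(
    sql_reader_query='SELECT Id, Name FROM dbo.Customers',  # or use sql_reader_stored_procedure_name instead, not both
    source_retry_count=2,
)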
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'AzureSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py index 9a078241d620..8d8d90d76e85 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py @@ -43,15 
+43,14 @@ class AzureSqlTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the Azure SQL database. - Type: string (or Expression with resultType string). + :param table_name: The table name of the Azure SQL database. Type: string + (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py index b3c388202f52..3d4f1eac3f58 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py @@ -43,15 +43,14 @@ class AzureSqlTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the Azure SQL database. - Type: string (or Expression with resultType string). + :param table_name: The table name of the Azure SQL database. Type: string + (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -67,7 +66,7 @@ class AzureSqlTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'AzureSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py index 8a050cf9cc64..284e0fcecde5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py @@ -49,10 +49,8 @@ class BlobSink(CopySink): :param blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with resultType boolean). :type blob_writer_add_header: object - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. 
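Because table_name is no longer required, AzureSqlTableDataset can now be created without it, e.g. when the table is parameterized; a sketch assuming LinkedServiceReference takes a reference_name keyword and both classes are importable from azure.mgmt.datafactory.models:

# Hypothetical example: dataset whose table name is supplied later or parameterized.
from azure.mgmt.datafactory.models import AzureSqlTableDataset, LinkedServiceReference

dataset = AzureSqlTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='AzureSqlLinkedService'),
    # table_name is optional after this change; omit it or bind it via parameters
)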
+ :type copy_behavior: object """ _validation = { @@ -70,7 +68,7 @@ class BlobSink(CopySink): 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py index 8fca0ac5cacc..370acc72e017 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py @@ -49,10 +49,8 @@ class BlobSink(CopySink): :param blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with resultType boolean). :type blob_writer_add_header: object - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object """ _validation = { @@ -70,7 +68,7 @@ class BlobSink(CopySink): 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py index e7ba96c18682..8a52f03cd5ba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py @@ -42,11 +42,8 @@ class CassandraSource(CopySource): the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is - case-insensitive. Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM', - 'LOCAL_QUORUM', 'ONE', 'TWO', 'THREE', 'LOCAL_ONE', 'SERIAL', - 'LOCAL_SERIAL' - :type consistency_level: str or - ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels + case-insensitive. 
+ :type consistency_level: object """ _validation = { @@ -60,7 +57,7 @@ class CassandraSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, - 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, + 'consistency_level': {'key': 'consistencyLevel', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py index bd95d158b868..6957385bab86 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py @@ -42,11 +42,8 @@ class CassandraSource(CopySource): the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is - case-insensitive. Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM', - 'LOCAL_QUORUM', 'ONE', 'TWO', 'THREE', 'LOCAL_ONE', 'SERIAL', - 'LOCAL_SERIAL' - :type consistency_level: str or - ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels + case-insensitive. + :type consistency_level: object """ _validation = { @@ -60,7 +57,7 @@ class CassandraSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, - 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, + 'consistency_level': {'key': 'consistencyLevel', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, consistency_level=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting.py new file mode 100644 index 000000000000..e0af269aaafd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ConnectorReadSetting(Model): + """Connector read setting. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ConnectorReadSetting, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting_py3.py new file mode 100644 index 000000000000..f6403ade8f71 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ConnectorReadSetting(Model): + """Connector read setting. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, **kwargs) -> None: + super(ConnectorReadSetting, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.max_concurrent_connections = max_concurrent_connections diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting.py new file mode 100644 index 000000000000..65daf9f07794 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ConnectorWriteSetting(Model): + """Connector write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ConnectorWriteSetting, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.copy_behavior = kwargs.get('copy_behavior', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting_py3.py new file mode 100644 index 000000000000..7f4ea65c916d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ConnectorWriteSetting(Model): + """Connector write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(ConnectorWriteSetting, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.max_concurrent_connections = max_concurrent_connections + self.copy_behavior = copy_behavior diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py index f73d34fcb3ce..2e7c00d551ba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py @@ -41,7 +41,7 @@ class CopyActivity(ExecutionActivity): :type sink: ~azure.mgmt.datafactory.models.CopySink :param translator: Copy activity translator. If not specified, tabular translator is used. - :type translator: ~azure.mgmt.datafactory.models.CopyTranslator + :type translator: object :param enable_staging: Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). @@ -67,6 +67,8 @@ class CopyActivity(ExecutionActivity): ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] + :param preserve: Preserve rules. + :type preserve: list[object] :param inputs: List of inputs for the activity. :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity. 
@@ -91,7 +93,7 @@ class CopyActivity(ExecutionActivity): 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, - 'translator': {'key': 'typeProperties.translator', 'type': 'CopyTranslator'}, + 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, @@ -99,6 +101,7 @@ class CopyActivity(ExecutionActivity): 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, + 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } @@ -115,6 +118,7 @@ def __init__(self, **kwargs): self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) self.preserve_rules = kwargs.get('preserve_rules', None) + self.preserve = kwargs.get('preserve', None) self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) self.type = 'Copy' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py index a02cd5d89e10..f8a1fee5625d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py @@ -41,7 +41,7 @@ class CopyActivity(ExecutionActivity): :type sink: ~azure.mgmt.datafactory.models.CopySink :param translator: Copy activity translator. If not specified, tabular translator is used. - :type translator: ~azure.mgmt.datafactory.models.CopyTranslator + :type translator: object :param enable_staging: Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). @@ -67,6 +67,8 @@ class CopyActivity(ExecutionActivity): ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] + :param preserve: Preserve rules. + :type preserve: list[object] :param inputs: List of inputs for the activity. :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity. 
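Note: with translator now typed as a plain object and the separate preserve list added, a raw TabularTranslator-style payload can be passed straight through the keyword-only constructor below. A minimal illustrative sketch, not part of the diff (dataset reference names and the preserve value are hypothetical):

from azure.mgmt.datafactory.models import (
    BlobSink, BlobSource, CopyActivity, DatasetReference)

# Sketch only: translator is a free-form dict now that CopyTranslator is removed,
# and copy_behavior on BlobSink accepts a plain string or expression object.
activity = CopyActivity(
    name="CopyBlobToBlob",
    source=BlobSource(),
    sink=BlobSink(copy_behavior="PreserveHierarchy"),
    translator={"type": "TabularTranslator",
                "columnMappings": "SourceCol: SinkCol"},
    preserve=["Attributes"],  # new preserve list alongside preserve_rules
    inputs=[DatasetReference(reference_name="SourceBlobDataset")],
    outputs=[DatasetReference(reference_name="SinkBlobDataset")],
)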
@@ -91,7 +93,7 @@ class CopyActivity(ExecutionActivity): 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, - 'translator': {'key': 'typeProperties.translator', 'type': 'CopyTranslator'}, + 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, @@ -99,11 +101,12 @@ class CopyActivity(ExecutionActivity): 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, + 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } - def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, inputs=None, outputs=None, **kwargs) -> None: + def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, preserve=None, inputs=None, outputs=None, **kwargs) -> None: super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.source = source self.sink = sink @@ -115,6 +118,7 @@ def __init__(self, *, name: str, source, sink, additional_properties=None, descr self.enable_skip_incompatible_row = enable_skip_incompatible_row self.redirect_incompatible_row_settings = redirect_incompatible_row_settings self.preserve_rules = preserve_rules + self.preserve = preserve self.inputs = inputs self.outputs = outputs self.type = 'Copy' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py index 6f714f7947d1..43117547e1ea 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py @@ -18,9 +18,10 @@ class CopySink(Model): You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: CosmosDbMongoDbApiSink, SalesforceSink, AzureDataExplorerSink, DynamicsSink, OdbcSink, AzureSearchIndexSink, - AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlSink, - DocumentDbCollectionSink, FileSystemSink, BlobSink, AzureTableSink, - AzureQueueSink, SapCloudForCustomerSink + AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, + AzureSqlSink, SqlServerSink, SqlSink, DocumentDbCollectionSink, + FileSystemSink, BlobSink, ParquetSink, AzureTableSink, AzureQueueSink, + SapCloudForCustomerSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. @@ -64,7 +65,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py index ea4f320e9bc6..4f9ebc84173c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py @@ -18,9 +18,10 @@ class CopySink(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: CosmosDbMongoDbApiSink, SalesforceSink, AzureDataExplorerSink, DynamicsSink, OdbcSink, AzureSearchIndexSink, - AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlSink, - DocumentDbCollectionSink, FileSystemSink, BlobSink, AzureTableSink, - AzureQueueSink, SapCloudForCustomerSink + AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, + AzureSqlSink, SqlServerSink, SqlSink, DocumentDbCollectionSink, + FileSystemSink, BlobSink, ParquetSink, AzureTableSink, AzureQueueSink, + SapCloudForCustomerSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. 
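The expanded sink hierarchy is still resolved through the 'type' discriminator; each subclass fills in the constant in its constructor and the _subtype_map below maps it back on deserialization. A minimal sketch, not part of the diff, using one of the newly registered subtypes (all of its properties are optional, as on the CopySink base):

from azure.mgmt.datafactory.models import AzureSqlSink

# Sketch only: the constructor sets the discriminator used by _subtype_map.
sink = AzureSqlSink()
print(sink.type)  # 'AzureSqlSink'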
@@ -64,7 +65,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py index 4f3da1e8cf85..c261c385de8d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py @@ -28,9 +28,11 @@ class CopySource(Model): AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, - SqlDWSource, SqlSource, RestSource, SapOpenHubSource, SapEccSource, + SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, + SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource, RelationalSource, - DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource + DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource, + DelimitedTextSource, ParquetSource All required parameters must be populated in order to send to Azure. 
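The new SQL source subtypes follow the constructor documented for AzureSqlSource earlier in this diff. A minimal sketch, not part of the diff (the stored-procedure name and parameter are hypothetical):

from azure.mgmt.datafactory.models import AzureSqlSource, StoredProcedureParameter

# Sketch only: reads via a stored procedure instead of sql_reader_query.
source = AzureSqlSource(
    sql_reader_stored_procedure_name="usp_GetRecentOrders",
    stored_procedure_parameters={
        "Days": StoredProcedureParameter(value="7", type="Int"),
    },
)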
@@ -65,7 +67,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 
'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py index eb439548481a..5a0b7d807b7f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py @@ -28,9 +28,11 @@ class CopySource(Model): AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, - SqlDWSource, SqlSource, RestSource, SapOpenHubSource, SapEccSource, + SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, + SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource, RelationalSource, - DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource + DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource, + DelimitedTextSource, ParquetSource All required parameters must be populated in order to send to Azure. 
@@ -65,7 +67,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 
'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py index b372cb91d8ef..ded527b2602a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py @@ -321,18 +321,6 @@ class SybaseAuthenticationType(str, Enum): windows = "Windows" -class DatasetCompressionLevel(str, Enum): - - optimal = "Optimal" - fastest = "Fastest" - - -class JsonFormatFilePattern(str, Enum): - - set_of_objects = "setOfObjects" - array_of_objects = "arrayOfObjects" - - class AzureFunctionActivityMethod(str, Enum): get = "GET" @@ -352,36 +340,17 @@ class WebActivityMethod(str, Enum): delete = "DELETE" -class CassandraSourceReadConsistencyLevels(str, Enum): - - all = "ALL" - each_quorum = "EACH_QUORUM" - quorum = "QUORUM" - local_quorum = "LOCAL_QUORUM" - one = "ONE" - two = "TWO" - three = "THREE" - local_one = "LOCAL_ONE" - serial = "SERIAL" - local_serial = "LOCAL_SERIAL" - - class StoredProcedureParameterType(str, Enum): string = "String" int_enum = "Int" + int64 = "Int64" decimal_enum = "Decimal" guid = "Guid" boolean = "Boolean" date_enum = "Date" -class SalesforceSourceReadBehavior(str, Enum): - - query = "Query" - query_all = "QueryAll" - - class HDInsightActivityDebugInfoOption(str, Enum): none = "None" @@ -389,37 +358,12 @@ class HDInsightActivityDebugInfoOption(str, Enum): failure = "Failure" -class SalesforceSinkWriteBehavior(str, Enum): - - insert = "Insert" - upsert = "Upsert" - - -class AzureSearchIndexWriteBehaviorType(str, Enum): - - merge = "Merge" - upload = "Upload" - - -class CopyBehaviorType(str, Enum): - - preserve_hierarchy = "PreserveHierarchy" - flatten_hierarchy = "FlattenHierarchy" - merge_files = "MergeFiles" - - class PolybaseSettingsRejectType(str, Enum): value = "value" percentage = "percentage" -class 
SapCloudForCustomerSinkWriteBehavior(str, Enum): - - insert = "Insert" - update = "Update" - - class WebHookActivityMethod(str, Enum): post = "POST" @@ -464,6 +408,12 @@ class ManagedIntegrationRuntimeNodeStatus(str, Enum): unavailable = "Unavailable" +class IntegrationRuntimeEntityReferenceType(str, Enum): + + integration_runtime_reference = "IntegrationRuntimeReference" + linked_service_reference = "LinkedServiceReference" + + class IntegrationRuntimeSsisCatalogPricingTier(str, Enum): basic = "Basic" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py index af540b1e6429..76d8375b3da9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -30,16 +30,17 @@ class Dataset(Model): GoogleBigQueryObjectDataset, EloquaObjectDataset, DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, AzureSearchIndexDataset, - WebTableDataset, RestResourceDataset, SqlServerTableDataset, - SapOpenHubTableDataset, SapEccResourceDataset, - SapCloudForCustomerResourceDataset, SalesforceObjectDataset, - RelationalTableDataset, AzureMySqlTableDataset, OracleTableDataset, - ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, - MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, - Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, - DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, - CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlTableDataset, - AzureTableDataset, AzureBlobDataset, AmazonS3Dataset + WebTableDataset, SapTableResourceDataset, RestResourceDataset, + SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, + SapEccResourceDataset, SapCloudForCustomerResourceDataset, + SalesforceObjectDataset, RelationalTableDataset, AzureMySqlTableDataset, + OracleTableDataset, ODataResourceDataset, + CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, + MongoDbCollectionDataset, FileShareDataset, Office365Dataset, + AzureBlobFSDataset, AzureDataLakeStoreDataset, DynamicsEntityDataset, + DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, + AzureSqlDWTableDataset, AzureSqlTableDataset, AzureTableDataset, + AzureBlobDataset, DelimitedTextDataset, ParquetDataset, AmazonS3Dataset All required parameters must be populated in order to send to Azure. 
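With table_name no longer required on AzureSqlTableDataset, a dataset can now be declared against the linked service alone. A minimal sketch, not part of the diff (the linked service name is hypothetical):

from azure.mgmt.datafactory.models import AzureSqlTableDataset, LinkedServiceReference

# Sketch only: table_name is now optional and can be supplied later or at runtime.
dataset = AzureSqlTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="AzureSqlLinkedService"),
)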
@@ -89,7 +90,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 
'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py index c16c0611b364..9c97e2bfa5e3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py @@ -22,9 +22,8 @@ class DatasetDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The Deflate compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The Deflate compression level. 
+ :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py index 715fe91a12a3..11d00081bc1c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py @@ -22,9 +22,8 @@ class DatasetDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The Deflate compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The Deflate compression level. + :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py index 48317d06f34e..4925127c7f0f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py @@ -22,9 +22,8 @@ class DatasetGZipCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The GZip compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The GZip compression level. + :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py index 99b1081469f8..97346e06366d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py @@ -22,9 +22,8 @@ class DatasetGZipCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The GZip compression level. 
Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The GZip compression level. + :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py new file mode 100644 index 000000000000..2c318a91cccb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetLocation(Model): + """Dataset location. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py new file mode 100644 index 000000000000..d4e32d753197 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetLocation(Model): + """Dataset location. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.folder_path = folder_path + self.file_name = file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py index adc64b228236..c793e32f2251 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py @@ -30,16 +30,17 @@ class Dataset(Model): GoogleBigQueryObjectDataset, EloquaObjectDataset, DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, AzureSearchIndexDataset, - WebTableDataset, RestResourceDataset, SqlServerTableDataset, - SapOpenHubTableDataset, SapEccResourceDataset, - SapCloudForCustomerResourceDataset, SalesforceObjectDataset, - RelationalTableDataset, AzureMySqlTableDataset, OracleTableDataset, - ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, - MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, - Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, - DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, - CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlTableDataset, - AzureTableDataset, AzureBlobDataset, AmazonS3Dataset + WebTableDataset, SapTableResourceDataset, RestResourceDataset, + SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, + SapEccResourceDataset, SapCloudForCustomerResourceDataset, + SalesforceObjectDataset, RelationalTableDataset, AzureMySqlTableDataset, + OracleTableDataset, ODataResourceDataset, + CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, + MongoDbCollectionDataset, FileShareDataset, Office365Dataset, + AzureBlobFSDataset, AzureDataLakeStoreDataset, DynamicsEntityDataset, + DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, + AzureSqlDWTableDataset, AzureSqlTableDataset, AzureTableDataset, + AzureBlobDataset, DelimitedTextDataset, ParquetDataset, AmazonS3Dataset All required parameters must be populated in order to send to Azure. 
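As a rough sketch of how the new DatasetLocation base model might be populated once this change ships (the type value and paths below are illustrative; in practice one of the concrete subclasses added here, such as AzureBlobStorageLocation, would normally be used instead of the base class):

from azure.mgmt.datafactory.models import DatasetLocation

# Hypothetical values; only 'type' is required. folder_path and file_name accept
# either plain strings or ADF expression objects such as
# {"value": "@dataset().path", "type": "Expression"}.
location = DatasetLocation(
    type='AzureBlobStorageLocation',
    folder_path='container/input',
    file_name='data.csv',
)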
@@ -89,7 +90,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 
'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py index 9312098be5a3..ed80bf3cbcf2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py @@ -22,9 +22,8 @@ class DatasetZipDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The ZipDeflate compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The ZipDeflate compression level. 
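A minimal sketch of what retyping the compression level from the DatasetCompressionLevel enum to object allows, assuming the looser typing is meant to admit ADF expressions as well as literals:

from azure.mgmt.datafactory.models import DatasetGZipCompression

# Literal value, as before.
gzip_fastest = DatasetGZipCompression(level='Fastest')

# Or an expression object, which the previous str/enum typing did not allow.
gzip_from_param = DatasetGZipCompression(
    level={'value': '@pipeline().parameters.compressionLevel',
           'type': 'Expression'},
)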
+ :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py index 74fbb92ce1ab..20abd6fe1088 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py @@ -22,9 +22,8 @@ class DatasetZipDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The ZipDeflate compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The ZipDeflate compression level. + :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py new file mode 100644 index 000000000000..bfee26fcd12c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DelimitedTextDataset(Dataset): + """Delimited text dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the delimited text storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param column_delimiter: The column delimiter. Type: string (or Expression + with resultType string). + :type column_delimiter: object + :param row_delimiter: The row delimiter. Type: string (or Expression with + resultType string). + :type row_delimiter: object + :param encoding_name: The code page name of the preferred encoding. If + miss, the default value is UTF-8, unless BOM denotes another Unicode + encoding. Refer to the name column of the table in the following link to + set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param compression_codec: + :type compression_codec: object + :param compression_level: The data compression method used for + DelimitedText. + :type compression_level: object + :param quote_char: The quote character. Type: string (or Expression with + resultType string). + :type quote_char: object + :param escape_char: The escape character. Type: string (or Expression with + resultType string). + :type escape_char: object + :param first_row_as_header: When used as input, treat the first row of + data as headers. When used as output,write the headers into the output as + the first row of data. The default value is false. Type: boolean (or + Expression with resultType boolean). + :type first_row_as_header: object + :param null_value: The null value string. Type: string (or Expression with + resultType string). 
+ :type null_value: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, + 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, + 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.column_delimiter = kwargs.get('column_delimiter', None) + self.row_delimiter = kwargs.get('row_delimiter', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.compression_codec = kwargs.get('compression_codec', None) + self.compression_level = kwargs.get('compression_level', None) + self.quote_char = kwargs.get('quote_char', None) + self.escape_char = kwargs.get('escape_char', None) + self.first_row_as_header = kwargs.get('first_row_as_header', None) + self.null_value = kwargs.get('null_value', None) + self.type = 'DelimitedText' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py new file mode 100644 index 000000000000..c2597e6a022b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DelimitedTextDataset(Dataset): + """Delimited text dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the delimited text storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param column_delimiter: The column delimiter. Type: string (or Expression + with resultType string). + :type column_delimiter: object + :param row_delimiter: The row delimiter. Type: string (or Expression with + resultType string). + :type row_delimiter: object + :param encoding_name: The code page name of the preferred encoding. If + miss, the default value is UTF-8, unless BOM denotes another Unicode + encoding. Refer to the name column of the table in the following link to + set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param compression_codec: + :type compression_codec: object + :param compression_level: The data compression method used for + DelimitedText. + :type compression_level: object + :param quote_char: The quote character. Type: string (or Expression with + resultType string). + :type quote_char: object + :param escape_char: The escape character. Type: string (or Expression with + resultType string). + :type escape_char: object + :param first_row_as_header: When used as input, treat the first row of + data as headers. When used as output,write the headers into the output as + the first row of data. The default value is false. Type: boolean (or + Expression with resultType boolean). + :type first_row_as_header: object + :param null_value: The null value string. Type: string (or Expression with + resultType string). 
+ :type null_value: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, + 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, + 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, column_delimiter=None, row_delimiter=None, encoding_name=None, compression_codec=None, compression_level=None, quote_char=None, escape_char=None, first_row_as_header=None, null_value=None, **kwargs) -> None: + super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.column_delimiter = column_delimiter + self.row_delimiter = row_delimiter + self.encoding_name = encoding_name + self.compression_codec = compression_codec + self.compression_level = compression_level + self.quote_char = quote_char + self.escape_char = escape_char + self.first_row_as_header = first_row_as_header + self.null_value = null_value + self.type = 'DelimitedText' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py new file mode 100644 index 000000000000..004eb595a05e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
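A hedged sketch of constructing the new DelimitedTextDataset with the constructor defined above; the linked service name and file path are made up, and a concrete DatasetLocation subclass (for example AzureBlobStorageLocation) would typically be supplied for 'location':

from azure.mgmt.datafactory.models import (
    DatasetLocation,
    DelimitedTextDataset,
    LinkedServiceReference,
)

# Assumed names, for illustration only.
dataset = DelimitedTextDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyBlobStorage'),
    location=DatasetLocation(type='AzureBlobStorageLocation',
                             folder_path='input', file_name='sales.csv'),
    column_delimiter=',',
    row_delimiter='\n',
    encoding_name='UTF-8',
    first_row_as_header=True,
    null_value='NULL',
)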
+# -------------------------------------------------------------------------- + +from .format_read_setting import FormatReadSetting + + +class DelimitedTextReadSetting(FormatReadSetting): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip + when reading data from input files. Type: integer (or Expression with + resultType integer). + :type skip_line_count: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextReadSetting, self).__init__(**kwargs) + self.skip_line_count = kwargs.get('skip_line_count', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py new file mode 100644 index 000000000000..87915fcb3db7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_read_setting_py3 import FormatReadSetting + + +class DelimitedTextReadSetting(FormatReadSetting): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip + when reading data from input files. Type: integer (or Expression with + resultType integer). 
+ :type skip_line_count: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, skip_line_count=None, **kwargs) -> None: + super(DelimitedTextReadSetting, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.skip_line_count = skip_line_count diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py new file mode 100644 index 000000000000..ae93f209c8b3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting + :param format_settings: DelimitedText format settings. 
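A small sketch of the new read settings model; note that 'type' here is a caller-supplied discriminator rather than a server-filled constant, so it must be passed explicitly (the value used is an assumption):

from azure.mgmt.datafactory.models import DelimitedTextReadSetting

# Skip the first two non-empty rows of each input file.
read_settings = DelimitedTextReadSetting(
    type='DelimitedTextReadSetting',
    skip_line_count=2,
)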
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextWriteSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSetting'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'DelimitedTextSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py new file mode 100644 index 000000000000..a1ba953a2662 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextWriteSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSetting'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'DelimitedTextSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py new file mode 100644 index 000000000000..9f2067d24b9c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + :param format_settings: DelimitedText format settings. 
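As an illustration of how the new sink composes with its format settings (a sketch only; 'store_settings' would normally carry a ConnectorWriteSetting subclass such as AzureBlobStorageWriteSetting, omitted here because its fields are not shown in this excerpt):

from azure.mgmt.datafactory.models import (
    DelimitedTextSink,
    DelimitedTextWriteSetting,
)

sink = DelimitedTextSink(
    format_settings=DelimitedTextWriteSetting(
        type='DelimitedTextWriteSetting',  # assumed discriminator value
        file_extension='.txt',
        quote_all_text=True,
    ),
    write_batch_size=10000,
)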
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextReadSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSetting'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'DelimitedTextSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py new file mode 100644 index 000000000000..b158f97bde81 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextReadSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSetting'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'DelimitedTextSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting.py new file mode 100644 index 000000000000..21fe168f1316 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_setting import FormatWriteSetting + + +class DelimitedTextWriteSetting(FormatWriteSetting): + """Delimited text write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param quote_all_text: Indicates whether string values should always be + enclosed with quotes. Type: boolean (or Expression with resultType + boolean). + :type quote_all_text: object + :param file_extension: Required. The file extension used to create the + files. Type: string (or Expression with resultType string). 
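The source side mirrors the sink; a sketch under the same assumptions ('store_settings' would take a ConnectorReadSetting subclass such as AzureBlobStorageReadSetting and is left out here):

from azure.mgmt.datafactory.models import (
    DelimitedTextReadSetting,
    DelimitedTextSource,
)

source = DelimitedTextSource(
    format_settings=DelimitedTextReadSetting(
        type='DelimitedTextReadSetting',  # assumed discriminator value
        skip_line_count=1,
    ),
    max_concurrent_connections=4,
)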
+ :type file_extension: object + """ + + _validation = { + 'type': {'required': True}, + 'file_extension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, + 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextWriteSetting, self).__init__(**kwargs) + self.quote_all_text = kwargs.get('quote_all_text', None) + self.file_extension = kwargs.get('file_extension', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting_py3.py new file mode 100644 index 000000000000..ac0e3b2d00cc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_setting_py3 import FormatWriteSetting + + +class DelimitedTextWriteSetting(FormatWriteSetting): + """Delimited text write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param quote_all_text: Indicates whether string values should always be + enclosed with quotes. Type: boolean (or Expression with resultType + boolean). + :type quote_all_text: object + :param file_extension: Required. The file extension used to create the + files. Type: string (or Expression with resultType string). 
+ :type file_extension: object + """ + + _validation = { + 'type': {'required': True}, + 'file_extension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, + 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + } + + def __init__(self, *, type: str, file_extension, additional_properties=None, quote_all_text=None, **kwargs) -> None: + super(DelimitedTextWriteSetting, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.quote_all_text = quote_all_text + self.file_extension = file_extension diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py index 25e80ee45466..c2908dc1dd05 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py @@ -43,6 +43,9 @@ class DocumentDbCollectionSink(CopySink): :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). :type nesting_separator: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. + Allowed values: insert and upsert. + :type write_behavior: object """ _validation = { @@ -58,9 +61,11 @@ class DocumentDbCollectionSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__(self, **kwargs): super(DocumentDbCollectionSink, self).__init__(**kwargs) self.nesting_separator = kwargs.get('nesting_separator', None) + self.write_behavior = kwargs.get('write_behavior', None) self.type = 'DocumentDbCollectionSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py index 111897036215..f1410cd211a4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py @@ -43,6 +43,9 @@ class DocumentDbCollectionSink(CopySink): :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). :type nesting_separator: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. + Allowed values: insert and upsert. 
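For illustration only: with the new untyped write_behavior property, an upsert into Cosmos DB might be requested as below (allowed values per the docstring above are 'insert' and 'upsert'):

from azure.mgmt.datafactory.models import DocumentDbCollectionSink

cosmos_sink = DocumentDbCollectionSink(
    nesting_separator='.',
    write_behavior='upsert',
)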
+ :type write_behavior: object """ _validation = { @@ -58,9 +61,11 @@ class DocumentDbCollectionSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, write_behavior=None, **kwargs) -> None: super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.nesting_separator = nesting_separator + self.write_behavior = write_behavior self.type = 'DocumentDbCollectionSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py index 233c4c99d4df..392b8ac7b971 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py @@ -45,7 +45,7 @@ class DynamicsAXResourceDataset(Dataset): :type type: str :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). - :type path: str + :type path: object """ _validation = { @@ -64,7 +64,7 @@ class DynamicsAXResourceDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py index 788c9084ee9b..6cade3e4aa59 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py @@ -45,7 +45,7 @@ class DynamicsAXResourceDataset(Dataset): :type type: str :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). 
- :type path: str + :type path: object """ _validation = { @@ -64,10 +64,10 @@ class DynamicsAXResourceDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } - def __init__(self, *, linked_service_name, path: str, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.path = path self.type = 'DynamicsAXResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py index 45bac7b52064..5afce6ced25b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py @@ -44,8 +44,7 @@ class DynamicsSink(CopySink): :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . - :vartype write_behavior: str + :vartype write_behavior: object :param ignore_null_values: The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). @@ -65,11 +64,11 @@ class DynamicsSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - write_behavior = "Upsert" + write_behavior = None def __init__(self, **kwargs): super(DynamicsSink, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py index 5f736f9cf658..ffdb08363bfd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py @@ -44,8 +44,7 @@ class DynamicsSink(CopySink): :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . - :vartype write_behavior: str + :vartype write_behavior: object :param ignore_null_values: The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). 
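A sketch of what relaxing 'path' from str to object permits for the Dynamics AX dataset, assuming the intent is to allow ADF expression objects; the linked service name is made up:

from azure.mgmt.datafactory.models import (
    DynamicsAXResourceDataset,
    LinkedServiceReference,
)

dataset = DynamicsAXResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyDynamicsAX'),
    path={'value': '@dataset().entityPath', 'type': 'Expression'},
)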
@@ -65,11 +64,11 @@ class DynamicsSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - write_behavior = "Upsert" + write_behavior = None def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py new file mode 100644 index 000000000000..5db1448a5a55 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class EntityReference(Model): + """The entity reference. + + :param type: The type of this referenced entity. Possible values include: + 'IntegrationRuntimeReference', 'LinkedServiceReference' + :type type: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. + :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(EntityReference, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.reference_name = kwargs.get('reference_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py new file mode 100644 index 000000000000..f87698b67a64 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class EntityReference(Model): + """The entity reference. + + :param type: The type of this referenced entity. Possible values include: + 'IntegrationRuntimeReference', 'LinkedServiceReference' + :type type: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. + :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__(self, *, type=None, reference_name: str=None, **kwargs) -> None: + super(EntityReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py new file mode 100644 index 000000000000..edce5fe68a65 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class FileServerLocation(DatasetLocation): + """The location of file server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileServerLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py new file mode 100644 index 000000000000..f7fb8354bcbc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class FileServerLocation(DatasetLocation): + """The location of file server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(FileServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting.py new file mode 100644 index 000000000000..6ba2a5f56b79 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class FileServerReadSetting(ConnectorReadSetting): + """File server read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: FileServer wildcardFileName. Type: string (or + Expression with resultType string). 
+ :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileServerReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting_py3.py new file mode 100644 index 000000000000..4393692d63f3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class FileServerReadSetting(ConnectorReadSetting): + """File server read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: FileServer wildcardFolderPath. 
Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: FileServer wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(FileServerReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting.py new file mode 100644 index 000000000000..9342210abdfb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting import ConnectorWriteSetting + + +class FileServerWriteSetting(ConnectorWriteSetting): + """File server write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. 
+ :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileServerWriteSetting, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting_py3.py new file mode 100644 index 000000000000..1ed4bf220417 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting_py3 import ConnectorWriteSetting + + +class FileServerWriteSetting(ConnectorWriteSetting): + """File server write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(FileServerWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py index 75baab87456e..8b8f238c9534 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py @@ -40,10 +40,8 @@ class FileSystemSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object """ _validation = { @@ -58,7 +56,7 @@ class FileSystemSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py index 92388128726e..24f8623cbb02 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py @@ -40,10 +40,8 @@ class FileSystemSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object """ _validation = { @@ -58,7 +56,7 @@ class FileSystemSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting.py similarity index 73% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting.py index 2b0242ef997c..730cec9f525f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting.py @@ -12,18 +12,15 @@ from msrest.serialization import Model -class CopyTranslator(Model): - """A copy activity translator. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: TabularTranslator +class FormatReadSetting(Model): + """Format read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. + :param type: Required. The read setting type. :type type: str """ @@ -36,11 +33,7 @@ class CopyTranslator(Model): 'type': {'key': 'type', 'type': 'str'}, } - _subtype_map = { - 'type': {'TabularTranslator': 'TabularTranslator'} - } - def __init__(self, **kwargs): - super(CopyTranslator, self).__init__(**kwargs) + super(FormatReadSetting, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = None + self.type = kwargs.get('type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting_py3.py similarity index 68% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting_py3.py index 3fef58394fd0..ed68bf35f009 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting_py3.py @@ -12,18 +12,15 @@ from msrest.serialization import Model -class CopyTranslator(Model): - """A copy activity translator. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: TabularTranslator +class FormatReadSetting(Model): + """Format read settings. All required parameters must be populated in order to send to Azure. 
:param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. + :param type: Required. The read setting type. :type type: str """ @@ -36,11 +33,7 @@ class CopyTranslator(Model): 'type': {'key': 'type', 'type': 'str'}, } - _subtype_map = { - 'type': {'TabularTranslator': 'TabularTranslator'} - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(CopyTranslator, self).__init__(**kwargs) + def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: + super(FormatReadSetting, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type = None + self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting.py new file mode 100644 index 000000000000..0fd6966859d5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FormatWriteSetting(Model): + """Format write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(FormatWriteSetting, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting_py3.py new file mode 100644 index 000000000000..3e5609066208 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FormatWriteSetting(Model): + """Format write settings. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: + super(FormatWriteSetting, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting.py new file mode 100644 index 000000000000..137a56948deb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class FtpReadSetting(ConnectorReadSetting): + """Ftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Ftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param use_binary_transfer: Specify whether to use binary transfer mode + for FTP stores. 
+ :type use_binary_transfer: bool + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(FtpReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.use_binary_transfer = kwargs.get('use_binary_transfer', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting_py3.py new file mode 100644 index 000000000000..5294301e4fd8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting_py3.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class FtpReadSetting(ConnectorReadSetting): + """Ftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Ftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param use_binary_transfer: Specify whether to use binary transfer mode + for FTP stores. 
+ :type use_binary_transfer: bool + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, use_binary_transfer: bool=None, **kwargs) -> None: + super(FtpReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.use_binary_transfer = use_binary_transfer diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py new file mode 100644 index 000000000000..5d5e933036df --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class FtpServerLocation(DatasetLocation): + """The location of ftp server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FtpServerLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py new file mode 100644 index 000000000000..ac296bcfca31 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class FtpServerLocation(DatasetLocation): + """The location of ftp server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(FtpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py new file mode 100644 index 000000000000..a8f5d1ba332c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HdfsLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py new file mode 100644 index 000000000000..2e07575bef0f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(HdfsLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting.py new file mode 100644 index 000000000000..4fdadbc2fcd0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class HdfsReadSetting(ConnectorReadSetting): + """HDFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param distcp_settings: Specifies Distcp-related settings. 
+ :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, **kwargs): + super(HdfsReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.distcp_settings = kwargs.get('distcp_settings', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting_py3.py new file mode 100644 index 000000000000..164a6f497e52 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class HdfsReadSetting(ConnectorReadSetting): + """HDFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. 
+ :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param distcp_settings: Specifies Distcp-related settings. + :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, **kwargs) -> None: + super(HdfsReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.distcp_settings = distcp_settings diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting.py new file mode 100644 index 000000000000..696a9fdb3faf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class HttpReadSetting(ConnectorReadSetting): + """Sftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param request_timeout: Specifies the timeout for a HTTP client to get + HTTP response from HTTP server. + :type request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HttpReadSetting, self).__init__(**kwargs) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.request_timeout = kwargs.get('request_timeout', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting_py3.py new file mode 100644 index 000000000000..3d5d75a80785 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting_py3.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class HttpReadSetting(ConnectorReadSetting): + """Sftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. 
Type: string (or Expression with resultType string). + :type additional_headers: object + :param request_timeout: Specifies the timeout for a HTTP client to get + HTTP response from HTTP server. + :type request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, request_timeout=None, **kwargs) -> None: + super(HttpReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.request_timeout = request_timeout diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py new file mode 100644 index 000000000000..94106fae9d15 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class HttpServerLocation(DatasetLocation): + """The location of http server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param relative_url: Specify the relativeUrl of http server. 
Type: string + (or Expression with resultType string) + :type relative_url: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HttpServerLocation, self).__init__(**kwargs) + self.relative_url = kwargs.get('relative_url', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py new file mode 100644 index 000000000000..c52c53dcf357 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class HttpServerLocation(DatasetLocation): + """The location of http server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param relative_url: Specify the relativeUrl of http server. 
Type: string + (or Expression with resultType string) + :type relative_url: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, relative_url=None, **kwargs) -> None: + super(HttpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.relative_url = relative_url diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py new file mode 100644 index 000000000000..ebc0e9b38d6f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeDataProxyProperties(Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. + :type connect_via: ~azure.mgmt.datafactory.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: + ~azure.mgmt.datafactory.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. + :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = kwargs.get('connect_via', None) + self.staging_linked_service = kwargs.get('staging_linked_service', None) + self.path = kwargs.get('path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py new file mode 100644 index 000000000000..532b774cad3d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
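# Illustrative sketch (not part of the generated patch): how the new
# IntegrationRuntimeDataProxyProperties model shown above might be populated.
# Only the connect_via / staging_linked_service / path keywords come from this
# diff; the EntityReference arguments and the reference names used here are
# assumptions for illustration.
from azure.mgmt.datafactory.models import (
    EntityReference, IntegrationRuntimeDataProxyProperties)

data_proxy = IntegrationRuntimeDataProxyProperties(
    connect_via=EntityReference(
        type='IntegrationRuntimeReference',        # assumed discriminator value
        reference_name='mySelfHostedIR'),          # hypothetical IR name
    staging_linked_service=EntityReference(
        type='LinkedServiceReference',             # assumed discriminator value
        reference_name='myStagingBlobService'),    # hypothetical linked service
    path='staging-container/ssis-data-proxy')      # Blob path holding staged data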
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeDataProxyProperties(Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. + :type connect_via: ~azure.mgmt.datafactory.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: + ~azure.mgmt.datafactory.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. + :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, *, connect_via=None, staging_linked_service=None, path: str=None, **kwargs) -> None: + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = connect_via + self.staging_linked_service = staging_linked_service + self.path = path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py index e1a091166529..293f071aa0b3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py @@ -30,6 +30,10 @@ class IntegrationRuntimeSsisProperties(Model): a managed dedicated integration runtime. :type custom_setup_script_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties + :param data_proxy_properties: Data proxy properties for a managed + dedicated integration runtime. + :type data_proxy_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties :param edition: The edition for the SSIS Integration Runtime. 
Possible values include: 'Standard', 'Enterprise' :type edition: str or @@ -41,6 +45,7 @@ class IntegrationRuntimeSsisProperties(Model): 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, 'license_type': {'key': 'licenseType', 'type': 'str'}, 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, + 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, 'edition': {'key': 'edition', 'type': 'str'}, } @@ -50,4 +55,5 @@ def __init__(self, **kwargs): self.catalog_info = kwargs.get('catalog_info', None) self.license_type = kwargs.get('license_type', None) self.custom_setup_script_properties = kwargs.get('custom_setup_script_properties', None) + self.data_proxy_properties = kwargs.get('data_proxy_properties', None) self.edition = kwargs.get('edition', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py index eb70dd23ddb7..f75775e29a7f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py @@ -30,6 +30,10 @@ class IntegrationRuntimeSsisProperties(Model): a managed dedicated integration runtime. :type custom_setup_script_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties + :param data_proxy_properties: Data proxy properties for a managed + dedicated integration runtime. + :type data_proxy_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties :param edition: The edition for the SSIS Integration Runtime. 
Possible values include: 'Standard', 'Enterprise' :type edition: str or @@ -41,13 +45,15 @@ class IntegrationRuntimeSsisProperties(Model): 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, 'license_type': {'key': 'licenseType', 'type': 'str'}, 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, + 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, 'edition': {'key': 'edition', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, edition=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, data_proxy_properties=None, edition=None, **kwargs) -> None: super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) self.additional_properties = additional_properties self.catalog_info = catalog_info self.license_type = license_type self.custom_setup_script_properties = custom_setup_script_properties + self.data_proxy_properties = data_proxy_properties self.edition = edition diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py index 736f9500018f..80f4ff0aaf8b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py @@ -30,10 +30,8 @@ class JsonFormat(DatasetStorageFormat): :type type: str :param file_pattern: File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is - 'setOfObjects'. It is case-sensitive. Possible values include: - 'setOfObjects', 'arrayOfObjects' - :type file_pattern: str or - ~azure.mgmt.datafactory.models.JsonFormatFilePattern + 'setOfObjects'. It is case-sensitive. + :type file_pattern: object :param nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). @@ -67,7 +65,7 @@ class JsonFormat(DatasetStorageFormat): 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'encoding_name': {'key': 'encodingName', 'type': 'object'}, 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py index a9a7f20ea103..2fdb44cc3b7f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py @@ -30,10 +30,8 @@ class JsonFormat(DatasetStorageFormat): :type type: str :param file_pattern: File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is - 'setOfObjects'. It is case-sensitive. 
Possible values include: - 'setOfObjects', 'arrayOfObjects' - :type file_pattern: str or - ~azure.mgmt.datafactory.models.JsonFormatFilePattern + 'setOfObjects'. It is case-sensitive. + :type file_pattern: object :param nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). @@ -67,7 +65,7 @@ class JsonFormat(DatasetStorageFormat): 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'encoding_name': {'key': 'encodingName', 'type': 'object'}, 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py index 3d4660d72e89..81ce26e5b657 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py @@ -19,26 +19,27 @@ class LinkedService(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: AzureFunctionLinkedService, - AzureDataExplorerLinkedService, GoogleAdWordsLinkedService, - OracleServiceCloudLinkedService, DynamicsAXLinkedService, - ResponsysLinkedService, AzureDatabricksLinkedService, - AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService, - SalesforceMarketingCloudLinkedService, NetezzaLinkedService, - VerticaLinkedService, ZohoLinkedService, XeroLinkedService, - SquareLinkedService, SparkLinkedService, ShopifyLinkedService, - ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, - PhoenixLinkedService, PaypalLinkedService, MarketoLinkedService, - MariaDBLinkedService, MagentoLinkedService, JiraLinkedService, - ImpalaLinkedService, HubspotLinkedService, HiveLinkedService, - HBaseLinkedService, GreenplumLinkedService, GoogleBigQueryLinkedService, - EloquaLinkedService, DrillLinkedService, CouchbaseLinkedService, - ConcurLinkedService, AzurePostgreSqlLinkedService, AmazonMWSLinkedService, - SapHanaLinkedService, SapBWLinkedService, SftpServerLinkedService, - FtpServerLinkedService, HttpLinkedService, AzureSearchLinkedService, - CustomDataSourceLinkedService, AmazonRedshiftLinkedService, - AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, - SapEccLinkedService, SapCloudForCustomerLinkedService, - SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, + AzureDataExplorerLinkedService, SapTableLinkedService, + GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, + DynamicsAXLinkedService, ResponsysLinkedService, + AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, + HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, + NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, + XeroLinkedService, SquareLinkedService, SparkLinkedService, + ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, + PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, + MarketoLinkedService, MariaDBLinkedService, MagentoLinkedService, + JiraLinkedService, ImpalaLinkedService, HubspotLinkedService, + HiveLinkedService, 
HBaseLinkedService, GreenplumLinkedService, + GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService, + CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService, + AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService, + SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService, + AzureSearchLinkedService, CustomDataSourceLinkedService, + AmazonRedshiftLinkedService, AmazonS3LinkedService, + RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, + SapCloudForCustomerLinkedService, SalesforceLinkedService, + Office365LinkedService, AzureBlobFSLinkedService, AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, WebLinkedService, ODataLinkedService, HdfsLinkedService, OdbcLinkedService, @@ -85,7 +86,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 
'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 
'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py index eadf4030e132..1ec0d17d24c5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py @@ -19,26 +19,27 @@ class LinkedService(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: AzureFunctionLinkedService, - AzureDataExplorerLinkedService, GoogleAdWordsLinkedService, - OracleServiceCloudLinkedService, DynamicsAXLinkedService, - ResponsysLinkedService, AzureDatabricksLinkedService, - AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService, - SalesforceMarketingCloudLinkedService, NetezzaLinkedService, - VerticaLinkedService, ZohoLinkedService, XeroLinkedService, - SquareLinkedService, SparkLinkedService, ShopifyLinkedService, - ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, - PhoenixLinkedService, PaypalLinkedService, MarketoLinkedService, - MariaDBLinkedService, MagentoLinkedService, JiraLinkedService, - ImpalaLinkedService, HubspotLinkedService, HiveLinkedService, - HBaseLinkedService, GreenplumLinkedService, GoogleBigQueryLinkedService, - EloquaLinkedService, DrillLinkedService, CouchbaseLinkedService, - ConcurLinkedService, AzurePostgreSqlLinkedService, AmazonMWSLinkedService, - SapHanaLinkedService, SapBWLinkedService, SftpServerLinkedService, - FtpServerLinkedService, HttpLinkedService, AzureSearchLinkedService, - CustomDataSourceLinkedService, AmazonRedshiftLinkedService, - AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, - SapEccLinkedService, SapCloudForCustomerLinkedService, - SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, + AzureDataExplorerLinkedService, SapTableLinkedService, + GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, + DynamicsAXLinkedService, ResponsysLinkedService, + AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, + HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, + NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, + XeroLinkedService, SquareLinkedService, SparkLinkedService, + ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, + PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, + MarketoLinkedService, MariaDBLinkedService, MagentoLinkedService, + JiraLinkedService, ImpalaLinkedService, HubspotLinkedService, + HiveLinkedService, HBaseLinkedService, GreenplumLinkedService, + GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService, + CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService, + AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService, + SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService, + 
AzureSearchLinkedService, CustomDataSourceLinkedService, + AmazonRedshiftLinkedService, AmazonS3LinkedService, + RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, + SapCloudForCustomerLinkedService, SalesforceLinkedService, + Office365LinkedService, AzureBlobFSLinkedService, AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, WebLinkedService, ODataLinkedService, HdfsLinkedService, OdbcLinkedService, @@ -85,7 +86,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 
'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 
'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py new file mode 100644 index 000000000000..ed970fd7729a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OraclePartitionSettings(Model): + """The settings that will be leveraged for oracle source partitioning. + + :param partition_names: Names of the physical partitions of oracle table. + :type partition_names: object + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_names': {'key': 'partitionNames', 'type': 'object'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = kwargs.get('partition_names', None) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py new file mode 100644 index 000000000000..c3d00b09ad90 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OraclePartitionSettings(Model): + """The settings that will be leveraged for oracle source partitioning. + + :param partition_names: Names of the physical partitions of oracle table. + :type partition_names: object + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_names': {'key': 'partitionNames', 'type': 'object'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, *, partition_names=None, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: + super(OraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = partition_names + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py index 12b3aa31353f..84ad79ed19c7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py @@ -40,6 +40,13 @@ class OracleSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param partition_option: The partition mechanism that will be used for + oracle read in parallel. + :type partition_option: object + :param partition_settings: The settings that will be leveraged for oracle + source partitioning. 
+ :type partition_settings: + ~azure.mgmt.datafactory.models.OraclePartitionSettings """ _validation = { @@ -54,10 +61,14 @@ class OracleSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, } def __init__(self, **kwargs): super(OracleSource, self).__init__(**kwargs) self.oracle_reader_query = kwargs.get('oracle_reader_query', None) self.query_timeout = kwargs.get('query_timeout', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py index 43afe27fda2f..dfcbd2e0330d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py @@ -40,6 +40,13 @@ class OracleSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param partition_option: The partition mechanism that will be used for + oracle read in parallel. + :type partition_option: object + :param partition_settings: The settings that will be leveraged for oracle + source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.OraclePartitionSettings """ _validation = { @@ -54,10 +61,14 @@ class OracleSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None, **kwargs) -> None: super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.oracle_reader_query = oracle_reader_query self.query_timeout = query_timeout + self.partition_option = partition_option + self.partition_settings = partition_settings self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py new file mode 100644 index 000000000000..ffaf8e1f6d93 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
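# Illustrative sketch (not part of the generated patch): the new Oracle
# partitioned-read options defined above, wiring an OraclePartitionSettings
# into an OracleSource. The keyword names are taken from the models in this
# change; the 'DynamicRange' value and the column/bound literals are
# assumptions (this diff types partition_option only as object).
from azure.mgmt.datafactory.models import OraclePartitionSettings, OracleSource

oracle_source = OracleSource(
    partition_option='DynamicRange',               # assumed service value
    partition_settings=OraclePartitionSettings(
        partition_column_name='ID',                # integer column used to split the range
        partition_lower_bound='1',
        partition_upper_bound='1000000'))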
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class ParquetDataset(Dataset): + """Parquet dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the parquet storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression_codec: + :type compression_codec: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ParquetDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.compression_codec = kwargs.get('compression_codec', None) + self.type = 'Parquet' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py new file mode 100644 index 000000000000..4d754450ce15 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
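# Illustrative sketch (not part of the generated patch): the new location-based
# ParquetDataset defined above, assuming the AzureBlobStorageLocation model
# added elsewhere in this change. The ParquetDataset keywords
# (linked_service_name, location, compression_codec) come from this diff; the
# AzureBlobStorageLocation arguments, reference name, and codec value are
# assumptions for illustration.
from azure.mgmt.datafactory.models import (
    AzureBlobStorageLocation, LinkedServiceReference, ParquetDataset)

parquet_ds = ParquetDataset(
    linked_service_name=LinkedServiceReference(reference_name='myBlobStorage'),
    location=AzureBlobStorageLocation(
        type='AzureBlobStorageLocation',           # assumed discriminator value
        container='data',                          # hypothetical container
        folder_path='curated/sales'),              # hypothetical folder
    compression_codec='snappy')                    # assumed codec value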
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ParquetDataset(Dataset): + """Parquet dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the parquet storage. 
+ :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression_codec: + :type compression_codec: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression_codec=None, **kwargs) -> None: + super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.compression_codec = compression_codec + self.type = 'Parquet' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py new file mode 100644 index 000000000000..38c634ed10dd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class ParquetSink(CopySink): + """A copy activity Parquet sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, + } + + def __init__(self, **kwargs): + super(ParquetSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'ParquetSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py new file mode 100644 index 000000000000..96c0c1b57926 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class ParquetSink(CopySink): + """A copy activity Parquet sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'ParquetSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py new file mode 100644 index 000000000000..02e74641d506 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ParquetSource(CopySource): + """A copy activity Parquet source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, + } + + def __init__(self, **kwargs): + super(ParquetSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'ParquetSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py new file mode 100644 index 000000000000..bfe077dd9999 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ParquetSource(CopySource): + """A copy activity Parquet source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'ParquetSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py index 9a1291bd4bfe..4d1a93c08915 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py @@ -41,9 +41,8 @@ class SalesforceSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is - Insert. Possible values include: 'Insert', 'Upsert' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + Insert. + :type write_behavior: object :param external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). @@ -71,7 +70,7 @@ class SalesforceSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py index 54a56618d01e..ed7591fbb59b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py @@ -41,9 +41,8 @@ class SalesforceSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is - Insert. Possible values include: 'Insert', 'Upsert' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + Insert. + :type write_behavior: object :param external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). 
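For orientation, the ParquetSource and ParquetSink models added above slot into the existing copy-activity shape. A minimal sketch, assuming a blob-to-blob copy in which the AzureBlobStorageReadSetting / AzureBlobStorageWriteSetting classes introduced elsewhere in this change take no required constructor arguments, and where activity and dataset names are purely illustrative:

    from azure.mgmt.datafactory.models import (
        AzureBlobStorageReadSetting, AzureBlobStorageWriteSetting,
        CopyActivity, DatasetReference, ParquetSink, ParquetSource)

    # store_settings is the new optional hook on both sides of the copy
    # (a ConnectorReadSetting subclass on the source, ConnectorWriteSetting on the sink).
    source = ParquetSource(store_settings=AzureBlobStorageReadSetting())
    sink = ParquetSink(store_settings=AzureBlobStorageWriteSetting())

    copy = CopyActivity(
        name='CopyParquetBlobToBlob',                                # illustrative activity name
        inputs=[DatasetReference(reference_name='InputParquet')],    # illustrative dataset
        outputs=[DatasetReference(reference_name='OutputParquet')],  # illustrative dataset
        source=source,
        sink=sink)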
@@ -71,7 +70,7 @@ class SalesforceSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py index 4f2590c3ab9d..57a10411f487 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py @@ -37,9 +37,8 @@ class SalesforceSource(CopySource): string). :type query: object :param read_behavior: The read behavior for the operation. Default is - Query. Possible values include: 'Query', 'QueryAll' - :type read_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + Query. + :type read_behavior: object """ _validation = { @@ -53,7 +52,7 @@ class SalesforceSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py index 4441e92eaff3..08e6776f5f98 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py @@ -37,9 +37,8 @@ class SalesforceSource(CopySource): string). :type query: object :param read_behavior: The read behavior for the operation. Default is - Query. Possible values include: 'Query', 'QueryAll' - :type read_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + Query. + :type read_behavior: object """ _validation = { @@ -53,7 +52,7 @@ class SalesforceSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py index e5a37858abb5..ae99093f277e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py @@ -41,9 +41,8 @@ class SapCloudForCustomerSink(CopySink): :param type: Required. 
Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is - 'Insert'. Possible values include: 'Insert', 'Update' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior + 'Insert'. + :type write_behavior: object """ _validation = { @@ -58,7 +57,7 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py index 29f01fdd6891..bdbc2cefcbd1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py @@ -41,9 +41,8 @@ class SapCloudForCustomerSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is - 'Insert'. Possible values include: 'Insert', 'Update' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior + 'Insert'. + :type write_behavior: object """ _validation = { @@ -58,7 +57,7 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py index e4f10113aecd..f79367f49b3d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py @@ -45,7 +45,7 @@ class SapEccResourceDataset(Dataset): :type type: str :param path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). 
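The Salesforce and SAP Cloud for Customer hunks above relax write_behavior / read_behavior from enum strings to plain objects, so either a literal value or a Data Factory expression can be supplied. A hedged sketch (the expression dict follows the usual ADF {'value': ..., 'type': 'Expression'} JSON shape; the external-ID field name is illustrative):

    from azure.mgmt.datafactory.models import SalesforceSink, SalesforceSource

    sink = SalesforceSink(
        write_behavior='Upsert',                          # literal value still works
        external_id_field_name='External_Id__c')          # illustrative field name
    source = SalesforceSource(
        read_behavior={'value': "@pipeline().parameters.readBehavior",
                       'type': 'Expression'})             # expression is now accepted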
- :type path: str + :type path: object """ _validation = { @@ -64,7 +64,7 @@ class SapEccResourceDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py index 08bf742dc415..76aaeb9bb9f2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py @@ -45,7 +45,7 @@ class SapEccResourceDataset(Dataset): :type type: str :param path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). - :type path: str + :type path: object """ _validation = { @@ -64,10 +64,10 @@ class SapEccResourceDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } - def __init__(self, *, linked_service_name, path: str, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.path = path self.type = 'SapEccResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py index 6a2d17862d6b..6379c33713d4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py @@ -35,7 +35,7 @@ class SapEccSource(CopySource): :type type: str :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
- :type query: str + :type query: object """ _validation = { @@ -48,7 +48,7 @@ class SapEccSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py index 95a11500bd24..4412cac39960 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py @@ -35,7 +35,7 @@ class SapEccSource(CopySource): :type type: str :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: str + :type query: object """ _validation = { @@ -48,10 +48,10 @@ class SapEccSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query: str=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'SapEccSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py index 391bd79f8c28..14eda87b7be6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py @@ -33,6 +33,9 @@ class SapHanaLinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str + :param connection_string: SAP HANA ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object :param server: Required. Host name of the SAP HANA server. Type: string (or Expression with resultType string). 
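Similarly, the SAP ECC dataset path and source query above are retyped from str to object, so both literals and expressions deserialize cleanly. A small sketch, assuming LinkedServiceReference(reference_name=...) as elsewhere in this package, with entity and query values that are only illustrative:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SapEccResourceDataset, SapEccSource)

    dataset = SapEccResourceDataset(
        linked_service_name=LinkedServiceReference(reference_name='SapEccLinkedService'),
        path={'value': '@dataset().entityPath', 'type': 'Expression'})  # expression path
    source = SapEccSource(query='$top=10')                              # literal OData query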
:type server: object @@ -63,6 +66,7 @@ class SapHanaLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, @@ -72,6 +76,7 @@ class SapHanaLinkedService(LinkedService): def __init__(self, **kwargs): super(SapHanaLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) self.server = kwargs.get('server', None) self.authentication_type = kwargs.get('authentication_type', None) self.user_name = kwargs.get('user_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py index bbf307d1bede..de378a5b2bf3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py @@ -33,6 +33,9 @@ class SapHanaLinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str + :param connection_string: SAP HANA ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object :param server: Required. Host name of the SAP HANA server. Type: string (or Expression with resultType string). 
:type server: object @@ -63,6 +66,7 @@ class SapHanaLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, @@ -70,8 +74,9 @@ class SapHanaLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string self.server = server self.authentication_type = authentication_type self.user_name = user_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py new file mode 100644 index 000000000000..e946dbcd9a50 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SapHanaSource(CopySource): + """A copy activity source for SAP HANA source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: SAP HANA Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param packet_size: The packet size of data read from SAP HANA. 
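The SAP HANA linked service now also carries an optional connection_string alongside the existing server property. A hedged sketch (host, port and ODBC string are illustrative; SecureString is the existing secret model in this package):

    from azure.mgmt.datafactory.models import SapHanaLinkedService, SecureString

    hana_ls = SapHanaLinkedService(
        server='hana.contoso.com:30015',                                       # still required
        connection_string='SERVERNODE=hana.contoso.com:30015;UID=copy_user;',  # new, illustrative ODBC string
        password=SecureString(value='<password>'))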
Type: + integer(or Expression with resultType integer). + :type packet_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'packet_size': {'key': 'packetSize', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapHanaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.packet_size = kwargs.get('packet_size', None) + self.type = 'SapHanaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py new file mode 100644 index 000000000000..730326c19183 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapHanaSource(CopySource): + """A copy activity source for SAP HANA source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: SAP HANA Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param packet_size: The packet size of data read from SAP HANA. Type: + integer(or Expression with resultType integer). 
+ :type packet_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'packet_size': {'key': 'packetSize', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, packet_size=None, **kwargs) -> None: + super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.packet_size = packet_size + self.type = 'SapHanaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py new file mode 100644 index 000000000000..6ff1ae31cd22 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SapHanaTableDataset(Dataset): + """SAP HANA Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param sap_hana_table_dataset_schema: The schema name of SAP HANA. Type: + string (or Expression with resultType string). + :type sap_hana_table_dataset_schema: object + :param table: The table name of SAP HANA. 
Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sap_hana_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapHanaTableDataset, self).__init__(**kwargs) + self.sap_hana_table_dataset_schema = kwargs.get('sap_hana_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'SapHanaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py new file mode 100644 index 000000000000..6dc5c48ba21d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapHanaTableDataset(Dataset): + """SAP HANA Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param sap_hana_table_dataset_schema: The schema name of SAP HANA. 
Type: + string (or Expression with resultType string). + :type sap_hana_table_dataset_schema: object + :param table: The table name of SAP HANA. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sap_hana_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, sap_hana_table_dataset_schema=None, table=None, **kwargs) -> None: + super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.sap_hana_table_dataset_schema = sap_hana_table_dataset_schema + self.table = table + self.type = 'SapHanaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py new file mode 100644 index 000000000000..83b76d0a4fdd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py @@ -0,0 +1,140 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SapTableLinkedService(LinkedService): + """SAP Table Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Host name of the SAP instance where the table is located. 
+ Type: string (or Expression with resultType string). + :type server: object + :param system_number: System number of the SAP system where the table is + located. (Usually a two-digit decimal number represented as a string.) + Type: string (or Expression with resultType string). + :type system_number: object + :param client_id: Client ID of the client on the SAP system where the + table is located. (Usually a three-digit decimal number represented as a + string) Type: string (or Expression with resultType string). + :type client_id: object + :param language: Language of the SAP system where the table is located. + The default value is EN. Type: string (or Expression with resultType + string). + :type language: object + :param system_id: SystemID of the SAP system where the table is located. + Type: string (or Expression with resultType string). + :type system_id: object + :param user_name: Username to access the SAP server where the table is + located. Type: string (or Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP server where the table is + located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param message_server: The hostname of the SAP Message Server. Type: + string (or Expression with resultType string). + :type message_server: object + :param message_server_service: The service name or port number of the + Message Server. Type: string (or Expression with resultType string). + :type message_server_service: object + :param snc_mode: SNC activation indicator to access the SAP server where + the table is located. Must be either 0 (off) or 1 (on). Type: string (or + Expression with resultType string). + :type snc_mode: object + :param snc_my_name: Initiator's SNC name to access the SAP server where + the table is located. Type: string (or Expression with resultType string). + :type snc_my_name: object + :param snc_partner_name: Communication partner's SNC name to access the + SAP server where the table is located. Type: string (or Expression with + resultType string). + :type snc_partner_name: object + :param snc_library_path: External security product's library to access the + SAP server where the table is located. Type: string (or Expression with + resultType string). + :type snc_library_path: object + :param snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, + 8, 9. Type: string (or Expression with resultType string). + :type snc_qop: object + :param logon_group: The Logon Group for the SAP System. Type: string (or + Expression with resultType string). + :type logon_group: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, + 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, + 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, + 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, + 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, + 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, + 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, + 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapTableLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.system_number = kwargs.get('system_number', None) + self.client_id = kwargs.get('client_id', None) + self.language = kwargs.get('language', None) + self.system_id = kwargs.get('system_id', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.message_server = kwargs.get('message_server', None) + self.message_server_service = kwargs.get('message_server_service', None) + self.snc_mode = kwargs.get('snc_mode', None) + self.snc_my_name = kwargs.get('snc_my_name', None) + self.snc_partner_name = kwargs.get('snc_partner_name', None) + self.snc_library_path = kwargs.get('snc_library_path', None) + self.snc_qop = kwargs.get('snc_qop', None) + self.logon_group = kwargs.get('logon_group', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py new file mode 100644 index 000000000000..d098acc1bbda --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py @@ -0,0 +1,140 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
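The SapTableLinkedService defined above supports application-server, message-server and SNC connection styles; only a subset of the properties is needed for a basic application-server connection. A hedged sketch with illustrative connection values:

    from azure.mgmt.datafactory.models import SapTableLinkedService, SecureString

    sap_table_ls = SapTableLinkedService(
        server='sapapp.contoso.com',     # illustrative host
        system_number='00',
        client_id='100',
        language='EN',
        user_name='copy_user',
        password=SecureString(value='<password>'))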
+# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapTableLinkedService(LinkedService): + """SAP Table Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Host name of the SAP instance where the table is located. + Type: string (or Expression with resultType string). + :type server: object + :param system_number: System number of the SAP system where the table is + located. (Usually a two-digit decimal number represented as a string.) + Type: string (or Expression with resultType string). + :type system_number: object + :param client_id: Client ID of the client on the SAP system where the + table is located. (Usually a three-digit decimal number represented as a + string) Type: string (or Expression with resultType string). + :type client_id: object + :param language: Language of the SAP system where the table is located. + The default value is EN. Type: string (or Expression with resultType + string). + :type language: object + :param system_id: SystemID of the SAP system where the table is located. + Type: string (or Expression with resultType string). + :type system_id: object + :param user_name: Username to access the SAP server where the table is + located. Type: string (or Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP server where the table is + located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param message_server: The hostname of the SAP Message Server. Type: + string (or Expression with resultType string). + :type message_server: object + :param message_server_service: The service name or port number of the + Message Server. Type: string (or Expression with resultType string). + :type message_server_service: object + :param snc_mode: SNC activation indicator to access the SAP server where + the table is located. Must be either 0 (off) or 1 (on). Type: string (or + Expression with resultType string). + :type snc_mode: object + :param snc_my_name: Initiator's SNC name to access the SAP server where + the table is located. Type: string (or Expression with resultType string). + :type snc_my_name: object + :param snc_partner_name: Communication partner's SNC name to access the + SAP server where the table is located. Type: string (or Expression with + resultType string). + :type snc_partner_name: object + :param snc_library_path: External security product's library to access the + SAP server where the table is located. Type: string (or Expression with + resultType string). + :type snc_library_path: object + :param snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, + 8, 9. Type: string (or Expression with resultType string). 
+ :type snc_qop: object + :param logon_group: The Logon Group for the SAP System. Type: string (or + Expression with resultType string). + :type logon_group: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, + 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, + 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, + 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, + 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, + 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, + 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, + 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server=None, system_number=None, client_id=None, language=None, system_id=None, user_name=None, password=None, message_server=None, message_server_service=None, snc_mode=None, snc_my_name=None, snc_partner_name=None, snc_library_path=None, snc_qop=None, logon_group=None, encrypted_credential=None, **kwargs) -> None: + super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.system_number = system_number + self.client_id = client_id + self.language = language + self.system_id = system_id + self.user_name = user_name + self.password = password + self.message_server = message_server + self.message_server_service = message_server_service + self.snc_mode = snc_mode + self.snc_my_name = snc_my_name + self.snc_partner_name = snc_partner_name + self.snc_library_path = snc_library_path + self.snc_qop = snc_qop + self.logon_group = logon_group + self.encrypted_credential = encrypted_credential + self.type = 'SapTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py new file mode 100644 index 
000000000000..b688fe16683b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SapTablePartitionSettings(Model): + """The settings that will be leveraged for SAP table source partitioning. + + :param partition_column_name: The name of the column that will be used for + proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + :param max_partitions_number: The maximum value of partitions the table + will be split into. Type: integer (or Expression with resultType string). + :type max_partitions_number: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapTablePartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) + self.max_partitions_number = kwargs.get('max_partitions_number', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py new file mode 100644 index 000000000000..37bdf610ab35 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SapTablePartitionSettings(Model): + """The settings that will be leveraged for SAP table source partitioning. 
+ + :param partition_column_name: The name of the column that will be used for + proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + :param max_partitions_number: The maximum value of partitions the table + will be split into. Type: integer (or Expression with resultType string). + :type max_partitions_number: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, + } + + def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, max_partitions_number=None, **kwargs) -> None: + super(SapTablePartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound + self.max_partitions_number = max_partitions_number diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py new file mode 100644 index 000000000000..24601ba6b793 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SapTableResourceDataset(Dataset): + """SAP Table Resource properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
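The SapTablePartitionSettings model above drives range partitioning of an SAP table read. A short sketch splitting on a date column into at most ten partitions (column name and bounds are illustrative):

    from azure.mgmt.datafactory.models import SapTablePartitionSettings

    partition_settings = SapTablePartitionSettings(
        partition_column_name='ERDAT',    # illustrative partitioning column
        partition_lower_bound='20190101',
        partition_upper_bound='20191231',
        max_partitions_number=10)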
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. The name of the SAP Table. Type: string (or + Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapTableResourceDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'SapTableResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py new file mode 100644 index 000000000000..7b034ccd3a91 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapTableResourceDataset(Dataset): + """SAP Table Resource properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. The name of the SAP Table. Type: string (or + Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(SapTableResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SapTableResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py new file mode 100644 index 000000000000..1c52db3eb0f8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py @@ -0,0 +1,97 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SapTableSource(CopySource): + """A copy activity source for SAP Table source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param row_count: The number of rows to be retrieved. Type: integer(or + Expression with resultType integer). + :type row_count: object + :param row_skips: The number of rows that will be skipped. Type: integer + (or Expression with resultType integer). + :type row_skips: object + :param rfc_table_fields: The fields of the SAP table that will be + retrieved. For example, column0, column1. Type: string (or Expression with + resultType string). + :type rfc_table_fields: object + :param rfc_table_options: The options for the filtering of the SAP Table. + For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with + resultType string). + :type rfc_table_options: object + :param batch_size: Specifies the maximum number of rows that will be + retrieved at a time when retrieving data from SAP Table. Type: integer (or + Expression with resultType integer). + :type batch_size: object + :param custom_rfc_read_table_function_module: Specifies the custom RFC + function module that will be used to read data from SAP Table. Type: + string (or Expression with resultType string). + :type custom_rfc_read_table_function_module: object + :param partition_option: The partition mechanism that will be used for SAP + table read in parallel. + :type partition_option: object + :param partition_settings: The settings that will be leveraged for SAP + table source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.SapTablePartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'row_count': {'key': 'rowCount', 'type': 'object'}, + 'row_skips': {'key': 'rowSkips', 'type': 'object'}, + 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, + 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, + } + + def __init__(self, **kwargs): + super(SapTableSource, self).__init__(**kwargs) + self.row_count = kwargs.get('row_count', None) + self.row_skips = kwargs.get('row_skips', None) + self.rfc_table_fields = kwargs.get('rfc_table_fields', None) + self.rfc_table_options = kwargs.get('rfc_table_options', None) + self.batch_size = kwargs.get('batch_size', None) + self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + self.type = 'SapTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py new file mode 100644 index 000000000000..8e8fbdf12002 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py @@ -0,0 +1,97 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapTableSource(CopySource): + """A copy activity source for SAP Table source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param row_count: The number of rows to be retrieved. Type: integer(or + Expression with resultType integer). + :type row_count: object + :param row_skips: The number of rows that will be skipped. Type: integer + (or Expression with resultType integer). + :type row_skips: object + :param rfc_table_fields: The fields of the SAP table that will be + retrieved. For example, column0, column1. Type: string (or Expression with + resultType string). + :type rfc_table_fields: object + :param rfc_table_options: The options for the filtering of the SAP Table. + For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with + resultType string). + :type rfc_table_options: object + :param batch_size: Specifies the maximum number of rows that will be + retrieved at a time when retrieving data from SAP Table. Type: integer (or + Expression with resultType integer). + :type batch_size: object + :param custom_rfc_read_table_function_module: Specifies the custom RFC + function module that will be used to read data from SAP Table. Type: + string (or Expression with resultType string). + :type custom_rfc_read_table_function_module: object + :param partition_option: The partition mechanism that will be used for SAP + table read in parallel. + :type partition_option: object + :param partition_settings: The settings that will be leveraged for SAP + table source partitioning. 
+ :type partition_settings: + ~azure.mgmt.datafactory.models.SapTablePartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'row_count': {'key': 'rowCount', 'type': 'object'}, + 'row_skips': {'key': 'rowSkips', 'type': 'object'}, + 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, + 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.row_count = row_count + self.row_skips = row_skips + self.rfc_table_fields = rfc_table_fields + self.rfc_table_options = rfc_table_options + self.batch_size = batch_size + self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module + self.partition_option = partition_option + self.partition_settings = partition_settings + self.type = 'SapTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py new file mode 100644 index 000000000000..5b8fd4e42ba2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class SftpLocation(DatasetLocation): + """The location of SFTP dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SftpLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py new file mode 100644 index 000000000000..c5e2feafa971 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class SftpLocation(DatasetLocation): + """The location of SFTP dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(SftpLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting.py new file mode 100644 index 000000000000..e0cd7ea8fda1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class SftpReadSetting(ConnectorReadSetting): + """Sftp read settings. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Sftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SftpReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting_py3.py new file mode 100644 index 000000000000..39beb756905a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class SftpReadSetting(ConnectorReadSetting): + """Sftp read settings. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Sftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(SftpReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py new file mode 100644 index 000000000000..45b1f1273903 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). 
+ :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.type = 'SqlServerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py new file mode 100644 index 000000000000..dbe1bf44e418 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.type = 'SqlServerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py new file mode 100644 index 000000000000..f9aa011047ea --- /dev/null +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SqlServerSource(CopySource): + """A copy activity SQL server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. 
+ :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.type = 'SqlServerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py new file mode 100644 index 000000000000..27d12985e595 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SqlServerSource(CopySource): + """A copy activity SQL server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). 
+ :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'SqlServerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py index c0bc0b66a5e2..d50540de4704 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py @@ -43,15 +43,14 @@ class SqlServerTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the SQL Server dataset. - Type: string (or Expression with resultType string). + :param table_name: The table name of the SQL Server dataset. Type: string + (or Expression with resultType string). 
:type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py index 0fb8d10ea111..bc8d4bec92e0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py @@ -43,15 +43,14 @@ class SqlServerTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the SQL Server dataset. - Type: string (or Expression with resultType string). + :param table_name: The table name of the SQL Server dataset. Type: string + (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -67,7 +66,7 @@ class SqlServerTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'SqlServerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py index 9c2ebd2b389f..7ec0313aab4b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py @@ -52,6 +52,10 @@ class SqlSink(CopySink): :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). 
+ :type stored_procedure_table_type_parameter_name: object """ _validation = { @@ -70,6 +74,7 @@ class SqlSink(CopySink): 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, } def __init__(self, **kwargs): @@ -78,4 +83,5 @@ def __init__(self, **kwargs): self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) self.pre_copy_script = kwargs.get('pre_copy_script', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) self.type = 'SqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py index 115cc3a899e9..1f6bb9685082 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py @@ -52,6 +52,10 @@ class SqlSink(CopySink): :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object """ _validation = { @@ -70,12 +74,14 @@ class SqlSink(CopySink): 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name self.type = 'SqlSink' diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py index 748cf7cba53c..ff16595aa8c7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py @@ -19,7 +19,7 @@ class StoredProcedureParameter(Model): Expression with resultType string). :type value: object :param type: Stored procedure parameter type. Possible values include: - 'String', 'Int', 'Decimal', 'Guid', 'Boolean', 'Date' + 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' :type type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType """ diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py index bd967ce52876..2842ef9ae35c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py @@ -19,7 +19,7 @@ class StoredProcedureParameter(Model): Expression with resultType string). :type value: object :param type: Stored procedure parameter type. Possible values include: - 'String', 'Int', 'Decimal', 'Guid', 'Boolean', 'Date' + 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' :type type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType """ diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py deleted file mode 100644 index 043c537ad860..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_translator import CopyTranslator - - -class TabularTranslator(CopyTranslator): - """A copy activity tabular translator. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param column_mappings: Column mappings. Example: "UserId: MyUserId, - Group: MyGroup, Name: MyName" Type: string (or Expression with resultType - string). - :type column_mappings: object - :param schema_mapping: The schema mapping to map between tabular data and - hierarchical data. Example: {"Column1": "$.Column1", "Column2": - "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or - Expression with resultType object). 
- :type schema_mapping: object - :param collection_reference: The JSON Path of the Nested Array that is - going to do cross-apply. Type: object (or Expression with resultType - object). - :type collection_reference: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, - 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, - 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(TabularTranslator, self).__init__(**kwargs) - self.column_mappings = kwargs.get('column_mappings', None) - self.schema_mapping = kwargs.get('schema_mapping', None) - self.collection_reference = kwargs.get('collection_reference', None) - self.type = 'TabularTranslator' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py deleted file mode 100644 index cb1c11e5bb53..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_translator_py3 import CopyTranslator - - -class TabularTranslator(CopyTranslator): - """A copy activity tabular translator. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param column_mappings: Column mappings. Example: "UserId: MyUserId, - Group: MyGroup, Name: MyName" Type: string (or Expression with resultType - string). - :type column_mappings: object - :param schema_mapping: The schema mapping to map between tabular data and - hierarchical data. Example: {"Column1": "$.Column1", "Column2": - "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or - Expression with resultType object). - :type schema_mapping: object - :param collection_reference: The JSON Path of the Nested Array that is - going to do cross-apply. Type: object (or Expression with resultType - object). 
- :type collection_reference: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, - 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, - 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, column_mappings=None, schema_mapping=None, collection_reference=None, **kwargs) -> None: - super(TabularTranslator, self).__init__(additional_properties=additional_properties, **kwargs) - self.column_mappings = column_mappings - self.schema_mapping = schema_mapping - self.collection_reference = collection_reference - self.type = 'TabularTranslator'
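
The new SAP Table models introduced above are ordinary msrest model classes, so they compose like the existing copy-activity building blocks. A minimal sketch follows (not part of this change): it only constructs and serializes the generated models, and the linked service, table, column, and partition-option values are hypothetical.

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SapTablePartitionSettings,
    SapTableResourceDataset,
    SapTableSource,
)

# Dataset pointing at a single SAP table through an existing linked service.
dataset = SapTableResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='MySapTableLinkedService'),
    table_name='MARA',
)

# Split the read into at most 10 ranges over a date-like column.
partition_settings = SapTablePartitionSettings(
    partition_column_name='ERSDA',
    partition_lower_bound='20180101',
    partition_upper_bound='20181231',
    max_partitions_number=10,
)

source = SapTableSource(
    row_count=1000,                               # stop after 1000 rows
    rfc_table_options="ERSDA GE '20180101'",      # SAP-style filter on the table
    partition_option='PartitionOnCalendarDate',   # assumed partition mechanism value
    partition_settings=partition_settings,
)

# msrest models expose serialize(); keys follow the REST wire format
# (typeProperties.tableName, partitionColumnName, maxPartitionsNumber, ...).
print(dataset.serialize())
print(source.serialize())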
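
The new dataset-location and read-setting pairs work the same way; for SFTP, SftpLocation describes where the files live and SftpReadSetting how they are enumerated. Another small sketch, with a hypothetical folder path and wildcard; the type discriminator strings are assumed rather than taken from this diff.

from azure.mgmt.datafactory.models import SftpLocation, SftpReadSetting

location = SftpLocation(
    type='SftpLocation',          # location type discriminator (assumed value)
    folder_path='inbound/orders',
    file_name='orders.csv',
)

read_settings = SftpReadSetting(
    type='SftpReadSettings',      # read-setting type string (assumed value)
    recursive=True,
    wildcard_file_name='*.csv',
    modified_datetime_start='2019-01-01T00:00:00Z',
)

print(location.serialize())
print(read_settings.serialize())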
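
The SQL Server source and sink additions, together with the newly allowed 'Int64' stored-procedure parameter type, can be exercised in the same fashion. All SQL object names below are hypothetical.

from azure.mgmt.datafactory.models import (
    SqlServerSink,
    SqlServerSource,
    StoredProcedureParameter,
)

source = SqlServerSource(
    sql_reader_stored_procedure_name='usp_GetChangedRows',
    stored_procedure_parameters={
        'BatchId': StoredProcedureParameter(value='42', type='Int64'),  # 'Int64' added by this change
    },
)

sink = SqlServerSink(
    sql_writer_stored_procedure_name='usp_UpsertRows',
    sql_writer_table_type='OrderTableType',
    # Name of the stored-procedure parameter that receives the table-valued rows.
    stored_procedure_table_type_parameter_name='Orders',
)

print(source.serialize())
print(sink.serialize())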