diff --git a/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst b/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst
index c2beec74971f..d6473844c5c8 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst
+++ b/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst
@@ -3,6 +3,268 @@ Release History
 ===============
 
+0.8.0 (2019-08-30)
+++++++++++++++++++
+
+**Features**
+
+- Model HubspotSource has a new parameter max_concurrent_connections
+- Model CouchbaseSource has a new parameter max_concurrent_connections
+- Model HttpSource has a new parameter max_concurrent_connections
+- Model AzureDataLakeStoreSource has a new parameter max_concurrent_connections
+- Model ConcurSource has a new parameter max_concurrent_connections
+- Model FileShareDataset has a new parameter modified_datetime_start
+- Model FileShareDataset has a new parameter modified_datetime_end
+- Model SalesforceSource has a new parameter max_concurrent_connections
+- Model NetezzaSource has a new parameter partition_option
+- Model NetezzaSource has a new parameter max_concurrent_connections
+- Model NetezzaSource has a new parameter partition_settings
+- Model AzureMySqlSource has a new parameter max_concurrent_connections
+- Model OdbcSink has a new parameter max_concurrent_connections
+- Model ImpalaObjectDataset has a new parameter impala_object_dataset_schema
+- Model ImpalaObjectDataset has a new parameter table
+- Model AzureSqlDWTableDataset has a new parameter azure_sql_dw_table_dataset_schema
+- Model AzureSqlDWTableDataset has a new parameter table
+- Model SapEccSource has a new parameter max_concurrent_connections
+- Model CopySource has a new parameter max_concurrent_connections
+- Model ServiceNowSource has a new parameter max_concurrent_connections
+- Model Trigger has a new parameter annotations
+- Model CassandraSource has a new parameter max_concurrent_connections
+- Model AzureQueueSink has a new parameter max_concurrent_connections
+- Model DrillSource has a new parameter max_concurrent_connections
+- Model DocumentDbCollectionSink has a new parameter write_behavior
+- Model DocumentDbCollectionSink has a new parameter max_concurrent_connections
+- Model SapHanaLinkedService has a new parameter connection_string
+- Model SalesforceSink has a new parameter max_concurrent_connections
+- Model HiveObjectDataset has a new parameter hive_object_dataset_schema
+- Model HiveObjectDataset has a new parameter table
+- Model GoogleBigQueryObjectDataset has a new parameter dataset
+- Model GoogleBigQueryObjectDataset has a new parameter table
+- Model FileSystemSource has a new parameter max_concurrent_connections
+- Model SqlSink has a new parameter stored_procedure_table_type_parameter_name
+- Model SqlSink has a new parameter max_concurrent_connections
+- Model CopySink has a new parameter max_concurrent_connections
+- Model SapCloudForCustomerSource has a new parameter max_concurrent_connections
+- Model CopyActivity has a new parameter preserve_rules
+- Model CopyActivity has a new parameter preserve
+- Model AmazonMWSSource has a new parameter max_concurrent_connections
+- Model SqlDWSink has a new parameter max_concurrent_connections
+- Model MagentoSource has a new parameter max_concurrent_connections
+- Model BlobEventsTrigger has a new parameter annotations
+- Model DynamicsSink has a new parameter max_concurrent_connections
+- Model AzurePostgreSqlTableDataset has a new parameter table
+- Model AzurePostgreSqlTableDataset has a new parameter azure_postgre_sql_table_dataset_schema
+- Model SqlServerTableDataset has a new parameter sql_server_table_dataset_schema
+- Model SqlServerTableDataset has a new parameter table
+- Model DocumentDbCollectionSource has a new parameter max_concurrent_connections
+- Model AzurePostgreSqlSource has a new parameter max_concurrent_connections
+- Model BlobSource has a new parameter max_concurrent_connections
+- Model VerticaTableDataset has a new parameter vertica_table_dataset_schema
+- Model VerticaTableDataset has a new parameter table
+- Model PhoenixObjectDataset has a new parameter phoenix_object_dataset_schema
+- Model PhoenixObjectDataset has a new parameter table
+- Model AzureSearchIndexSink has a new parameter max_concurrent_connections
+- Model MarketoSource has a new parameter max_concurrent_connections
+- Model DynamicsSource has a new parameter max_concurrent_connections
+- Model SparkObjectDataset has a new parameter spark_object_dataset_schema
+- Model SparkObjectDataset has a new parameter table
+- Model XeroSource has a new parameter max_concurrent_connections
+- Model AmazonRedshiftSource has a new parameter max_concurrent_connections
+- Model CustomActivity has a new parameter retention_time_in_days
+- Model WebSource has a new parameter max_concurrent_connections
+- Model GreenplumTableDataset has a new parameter greenplum_table_dataset_schema
+- Model GreenplumTableDataset has a new parameter table
+- Model SalesforceMarketingCloudSource has a new parameter max_concurrent_connections
+- Model GoogleBigQuerySource has a new parameter max_concurrent_connections
+- Model JiraSource has a new parameter max_concurrent_connections
+- Model MongoDbSource has a new parameter max_concurrent_connections
+- Model DrillTableDataset has a new parameter drill_table_dataset_schema
+- Model DrillTableDataset has a new parameter table
+- Model ExecuteSSISPackageActivity has a new parameter log_location
+- Model SparkSource has a new parameter max_concurrent_connections
+- Model AzureTableSink has a new parameter max_concurrent_connections
+- Model AzureDataLakeStoreSink has a new parameter enable_adls_single_file_parallel
+- Model AzureDataLakeStoreSink has a new parameter max_concurrent_connections
+- Model PrestoSource has a new parameter max_concurrent_connections
+- Model RelationalSource has a new parameter max_concurrent_connections
+- Model TumblingWindowTrigger has a new parameter annotations
+- Model ImpalaSource has a new parameter max_concurrent_connections
+- Model ScheduleTrigger has a new parameter annotations
+- Model QuickBooksSource has a new parameter max_concurrent_connections
+- Model PrestoObjectDataset has a new parameter presto_object_dataset_schema
+- Model PrestoObjectDataset has a new parameter table
+- Model OracleSink has a new parameter max_concurrent_connections
+- Model HdfsSource has a new parameter max_concurrent_connections
+- Model PhoenixSource has a new parameter max_concurrent_connections
+- Model SapCloudForCustomerSink has a new parameter max_concurrent_connections
+- Model SquareSource has a new parameter max_concurrent_connections
+- Model OracleSource has a new parameter partition_option
+- Model OracleSource has a new parameter max_concurrent_connections
+- Model OracleSource has a new parameter partition_settings
+- Model BlobTrigger has a new parameter annotations
+- Model HDInsightOnDemandLinkedService has a new parameter virtual_network_id
+- Model HDInsightOnDemandLinkedService has a new parameter subnet_name
+- Model AmazonS3LinkedService has a new parameter service_url
+- Model HDInsightLinkedService has a new parameter file_system
+- Model MultiplePipelineTrigger has a new parameter annotations
+- Model HBaseSource has a new parameter max_concurrent_connections
+- Model OracleTableDataset has a new parameter oracle_table_dataset_schema
+- Model OracleTableDataset has a new parameter table
+- Model RerunTumblingWindowTrigger has a new parameter annotations
+- Model EloquaSource has a new parameter max_concurrent_connections
+- Model AzureSqlTableDataset has a new parameter azure_sql_table_dataset_schema
+- Model AzureSqlTableDataset has a new parameter table
+- Model BlobSink has a new parameter max_concurrent_connections
+- Model HiveSource has a new parameter max_concurrent_connections
+- Model SqlSource has a new parameter max_concurrent_connections
+- Model PaypalSource has a new parameter max_concurrent_connections
+- Model AzureBlobDataset has a new parameter modified_datetime_start
+- Model AzureBlobDataset has a new parameter modified_datetime_end
+- Model VerticaSource has a new parameter max_concurrent_connections
+- Model AmazonS3Dataset has a new parameter modified_datetime_start
+- Model AmazonS3Dataset has a new parameter modified_datetime_end
+- Model PipelineRun has a new parameter run_group_id
+- Model PipelineRun has a new parameter is_latest
+- Model ShopifySource has a new parameter max_concurrent_connections
+- Model MariaDBSource has a new parameter max_concurrent_connections
+- Model TeradataLinkedService has a new parameter connection_string
+- Model ODataLinkedService has a new parameter service_principal_embedded_cert
+- Model ODataLinkedService has a new parameter aad_service_principal_credential_type
+- Model ODataLinkedService has a new parameter service_principal_key
+- Model ODataLinkedService has a new parameter service_principal_id
+- Model ODataLinkedService has a new parameter aad_resource_id
+- Model ODataLinkedService has a new parameter service_principal_embedded_cert_password
+- Model ODataLinkedService has a new parameter tenant
+- Model AzureTableSource has a new parameter max_concurrent_connections
+- Model IntegrationRuntimeSsisProperties has a new parameter data_proxy_properties
+- Model ZohoSource has a new parameter max_concurrent_connections
+- Model ResponsysSource has a new parameter max_concurrent_connections
+- Model FileSystemSink has a new parameter max_concurrent_connections
+- Model SqlDWSource has a new parameter max_concurrent_connections
+- Model GreenplumSource has a new parameter max_concurrent_connections
+- Model AzureDatabricksLinkedService has a new parameter new_cluster_init_scripts
+- Model AzureDatabricksLinkedService has a new parameter new_cluster_driver_node_type
+- Model AzureDatabricksLinkedService has a new parameter new_cluster_enable_elastic_disk
+- Added operation TriggerRunsOperations.rerun
+- Added operation ExposureControlOperations.get_feature_value_by_factory
+- Added model Office365Dataset
+- Added model AzureBlobFSDataset
+- Added model CommonDataServiceForAppsEntityDataset
+- Added model DynamicsCrmEntityDataset
+- Added model AzureSqlMITableDataset
+- Added model HdfsLocation
+- Added model HttpServerLocation
+- Added model SftpLocation
+- Added model FtpServerLocation
+- Added model FileServerLocation
+- Added model AmazonS3Location
+- Added model AzureDataLakeStoreLocation
+- Added model AzureBlobFSLocation
+- Added model AzureBlobStorageLocation
+- Added model DatasetLocation
+- Added model BinaryDataset
+- Added model JsonDataset
+- Added model DelimitedTextDataset
+- Added model ParquetDataset
+- Added model AvroDataset
+- Added model GoogleAdWordsSource
+- Added model OracleServiceCloudSource
+- Added model DynamicsAXSource
+- Added model NetezzaPartitionSettings
+- Added model AzureMariaDBSource
+- Added model AzureBlobFSSource
+- Added model Office365Source
+- Added model MongoDbCursorMethodsProperties
+- Added model CosmosDbMongoDbApiSource
+- Added model MongoDbV2Source
+- Added model TeradataPartitionSettings
+- Added model TeradataSource
+- Added model OraclePartitionSettings
+- Added model AzureDataExplorerSource
+- Added model SqlMISource
+- Added model AzureSqlSource
+- Added model SqlServerSource
+- Added model RestSource
+- Added model SapTablePartitionSettings
+- Added model SapTableSource
+- Added model SapOpenHubSource
+- Added model SapHanaSource
+- Added model SalesforceServiceCloudSource
+- Added model ODataSource
+- Added model SapBwSource
+- Added model SybaseSource
+- Added model PostgreSqlSource
+- Added model MySqlSource
+- Added model OdbcSource
+- Added model Db2Source
+- Added model MicrosoftAccessSource
+- Added model InformixSource
+- Added model CommonDataServiceForAppsSource
+- Added model DynamicsCrmSource
+- Added model HdfsReadSettings
+- Added model HttpReadSettings
+- Added model SftpReadSettings
+- Added model FtpReadSettings
+- Added model FileServerReadSettings
+- Added model AmazonS3ReadSettings
+- Added model AzureDataLakeStoreReadSettings
+- Added model AzureBlobFSReadSettings
+- Added model AzureBlobStorageReadSettings
+- Added model StoreReadSettings
+- Added model BinarySource
+- Added model JsonSource
+- Added model FormatReadSettings
+- Added model DelimitedTextReadSettings
+- Added model DelimitedTextSource
+- Added model ParquetSource
+- Added model AvroSource
+- Added model AzureDataExplorerCommandActivity
+- Added model SSISAccessCredential
+- Added model SSISLogLocation
+- Added model CosmosDbMongoDbApiSink
+- Added model SalesforceServiceCloudSink
+- Added model AzureDataExplorerSink
+- Added model CommonDataServiceForAppsSink
+- Added model DynamicsCrmSink
+- Added model MicrosoftAccessSink
+- Added model InformixSink
+- Added model AzureBlobFSSink
+- Added model SqlMISink
+- Added model AzureSqlSink
+- Added model SqlServerSink
+- Added model FileServerWriteSettings
+- Added model AzureDataLakeStoreWriteSettings
+- Added model AzureBlobFSWriteSettings
+- Added model AzureBlobStorageWriteSettings
+- Added model StoreWriteSettings
+- Added model BinarySink
+- Added model ParquetSink
+- Added model JsonWriteSettings
+- Added model DelimitedTextWriteSettings
+- Added model FormatWriteSettings
+- Added model AvroWriteSettings
+- Added model AvroSink
+- Added model AzureMySqlSink
+- Added model AzurePostgreSqlSink
+- Added model JsonSink
+- Added model DelimitedTextSink
+- Added model WebHookActivity
+- Added model ValidationActivity
+- Added model EntityReference
+- Added model IntegrationRuntimeDataProxyProperties
+- Added model SsisVariable
+- Added model SsisEnvironment
+- Added model SsisParameter
+- Added model SsisPackage
+- Added model SsisEnvironmentReference
+- Added model SsisProject
+- Added model SsisFolder
+
+**Breaking changes**
+
+- Operation PipelinesOperations.create_run has a new signature
+- Model SSISPackageLocation has a new signature
+
 0.7.0 (2019-01-31)
 ++++++++++++++++++
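Reviewer note: to make the changelog above concrete, here is a minimal sketch of the reworked run APIs. All resource names are placeholders, and the exact keyword arguments of the new `create_run` signature should be verified against the generated `PipelinesOperations`; `trigger_runs.rerun` is the operation added in this release.

```python
# Hedged sketch of the 0.8.0 run APIs; values in angle brackets are
# placeholders, not values taken from this diff.
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.datafactory import DataFactoryManagementClient

credentials = ServicePrincipalCredentials(
    client_id='<client-id>', secret='<secret>', tenant='<tenant-id>')
client = DataFactoryManagementClient(credentials, '<subscription-id>')

# Breaking change: create_run now takes its options as explicit keyword
# arguments (parameters, is_recovery, reference_pipeline_run_id, ...).
run = client.pipelines.create_run(
    '<resource-group>', '<factory-name>', '<pipeline-name>',
    parameters={'inputPath': 'container/input'})

# New operation in this release: rerun a trigger run by name and run id.
client.trigger_runs.rerun(
    '<resource-group>', '<factory-name>', '<trigger-name>', '<trigger-run-id>')
```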
diff --git a/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in b/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in
index 6ceb27f7a96e..e4884efef41b 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in
+++ b/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in
@@ -1,3 +1,4 @@
+recursive-include tests *.py *.yaml
 include *.rst
 include azure/__init__.py
 include azure/mgmt/__init__.py
diff --git a/sdk/datafactory/azure-mgmt-datafactory/README.rst b/sdk/datafactory/azure-mgmt-datafactory/README.rst
index 78282b6062d6..df8f42da7efb 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/README.rst
+++ b/sdk/datafactory/azure-mgmt-datafactory/README.rst
@@ -6,7 +6,7 @@ This is the Microsoft Azure Data Factory Management Client Library.
 Azure Resource Manager (ARM) is the next generation of management APIs that
 replace the old Azure Service Management (ASM).
 
-This package has been tested with Python 2.7, 3.4, 3.5, 3.6 and 3.7.
+This package has been tested with Python 2.7, 3.5, 3.6 and 3.7.
 
 For the older Azure Service Management (ASM) libraries, see
 `azure-servicemanagement-legacy <https://pypi.python.org/pypi/azure-servicemanagement-legacy>`__ library.
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py
index e49abccce72a..bb8a2a22fd77 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py
@@ -25,8 +25,8 @@
 from .operations.pipeline_runs_operations import PipelineRunsOperations
 from .operations.activity_runs_operations import ActivityRunsOperations
 from .operations.triggers_operations import TriggersOperations
-from .operations.rerun_triggers_operations import RerunTriggersOperations
 from .operations.trigger_runs_operations import TriggerRunsOperations
+from .operations.rerun_triggers_operations import RerunTriggersOperations
 from . import models
@@ -92,10 +92,10 @@ class DataFactoryManagementClient(SDKClient):
     :vartype activity_runs: azure.mgmt.datafactory.operations.ActivityRunsOperations
     :ivar triggers: Triggers operations
     :vartype triggers: azure.mgmt.datafactory.operations.TriggersOperations
-    :ivar rerun_triggers: RerunTriggers operations
-    :vartype rerun_triggers: azure.mgmt.datafactory.operations.RerunTriggersOperations
     :ivar trigger_runs: TriggerRuns operations
     :vartype trigger_runs: azure.mgmt.datafactory.operations.TriggerRunsOperations
+    :ivar rerun_triggers: RerunTriggers operations
+    :vartype rerun_triggers: azure.mgmt.datafactory.operations.RerunTriggersOperations
     :param credentials: Credentials needed for the client to connect to Azure.
:type credentials: :mod:`A msrestazure Credentials @@ -140,7 +140,7 @@ def __init__( self._client, self.config, self._serialize, self._deserialize) self.triggers = TriggersOperations( self._client, self.config, self._serialize, self._deserialize) - self.rerun_triggers = RerunTriggersOperations( - self._client, self.config, self._serialize, self._deserialize) self.trigger_runs = TriggerRunsOperations( self._client, self.config, self._serialize, self._deserialize) + self.rerun_triggers = RerunTriggersOperations( + self._client, self.config, self._serialize, self._deserialize) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 46e9bf12bf1a..a04232d5b860 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -45,6 +45,7 @@ from .trigger_py3 import Trigger from .trigger_resource_py3 import TriggerResource from .create_run_response_py3 import CreateRunResponse + from .trigger_subscription_operation_status_py3 import TriggerSubscriptionOperationStatus from .factory_vsts_configuration_py3 import FactoryVSTSConfiguration from .factory_git_hub_configuration_py3 import FactoryGitHubConfiguration from .factory_repo_update_py3 import FactoryRepoUpdate @@ -95,6 +96,11 @@ from .schedule_trigger_py3 import ScheduleTrigger from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger from .azure_function_linked_service_py3 import AzureFunctionLinkedService + from .azure_data_explorer_linked_service_py3 import AzureDataExplorerLinkedService + from .sap_table_linked_service_py3 import SapTableLinkedService + from .google_ad_words_linked_service_py3 import GoogleAdWordsLinkedService + from .oracle_service_cloud_linked_service_py3 import OracleServiceCloudLinkedService + from .dynamics_ax_linked_service_py3 import DynamicsAXLinkedService from .responsys_linked_service_py3 import ResponsysLinkedService from .azure_databricks_linked_service_py3 import AzureDatabricksLinkedService from .azure_data_lake_analytics_linked_service_py3 import AzureDataLakeAnalyticsLinkedService @@ -114,6 +120,7 @@ from .phoenix_linked_service_py3 import PhoenixLinkedService from .paypal_linked_service_py3 import PaypalLinkedService from .marketo_linked_service_py3 import MarketoLinkedService + from .azure_maria_db_linked_service_py3 import AzureMariaDBLinkedService from .maria_db_linked_service_py3 import MariaDBLinkedService from .magento_linked_service_py3 import MagentoLinkedService from .jira_linked_service_py3 import JiraLinkedService @@ -138,10 +145,17 @@ from .custom_data_source_linked_service_py3 import CustomDataSourceLinkedService from .amazon_redshift_linked_service_py3 import AmazonRedshiftLinkedService from .amazon_s3_linked_service_py3 import AmazonS3LinkedService + from .rest_service_linked_service_py3 import RestServiceLinkedService + from .sap_open_hub_linked_service_py3 import SapOpenHubLinkedService from .sap_ecc_linked_service_py3 import SapEccLinkedService from .sap_cloud_for_customer_linked_service_py3 import SapCloudForCustomerLinkedService + from .salesforce_service_cloud_linked_service_py3 import SalesforceServiceCloudLinkedService from .salesforce_linked_service_py3 import SalesforceLinkedService + from .office365_linked_service_py3 import Office365LinkedService + from .azure_blob_fs_linked_service_py3 import AzureBlobFSLinkedService from 
.azure_data_lake_store_linked_service_py3 import AzureDataLakeStoreLinkedService + from .cosmos_db_mongo_db_api_linked_service_py3 import CosmosDbMongoDbApiLinkedService + from .mongo_db_v2_linked_service_py3 import MongoDbV2LinkedService from .mongo_db_linked_service_py3 import MongoDbLinkedService from .cassandra_linked_service_py3 import CassandraLinkedService from .web_client_certificate_authentication_py3 import WebClientCertificateAuthentication @@ -151,6 +165,8 @@ from .web_linked_service_py3 import WebLinkedService from .odata_linked_service_py3 import ODataLinkedService from .hdfs_linked_service_py3 import HdfsLinkedService + from .microsoft_access_linked_service_py3 import MicrosoftAccessLinkedService + from .informix_linked_service_py3 import InformixLinkedService from .odbc_linked_service_py3 import OdbcLinkedService from .azure_ml_linked_service_py3 import AzureMLLinkedService from .teradata_linked_service_py3 import TeradataLinkedService @@ -162,16 +178,23 @@ from .oracle_linked_service_py3 import OracleLinkedService from .file_server_linked_service_py3 import FileServerLinkedService from .hd_insight_linked_service_py3 import HDInsightLinkedService + from .common_data_service_for_apps_linked_service_py3 import CommonDataServiceForAppsLinkedService + from .dynamics_crm_linked_service_py3 import DynamicsCrmLinkedService from .dynamics_linked_service_py3 import DynamicsLinkedService from .cosmos_db_linked_service_py3 import CosmosDbLinkedService from .azure_key_vault_linked_service_py3 import AzureKeyVaultLinkedService from .azure_batch_linked_service_py3 import AzureBatchLinkedService + from .azure_sql_mi_linked_service_py3 import AzureSqlMILinkedService from .azure_sql_database_linked_service_py3 import AzureSqlDatabaseLinkedService from .sql_server_linked_service_py3 import SqlServerLinkedService from .azure_sql_dw_linked_service_py3 import AzureSqlDWLinkedService from .azure_table_storage_linked_service_py3 import AzureTableStorageLinkedService from .azure_blob_storage_linked_service_py3 import AzureBlobStorageLinkedService from .azure_storage_linked_service_py3 import AzureStorageLinkedService + from .google_ad_words_object_dataset_py3 import GoogleAdWordsObjectDataset + from .azure_data_explorer_table_dataset_py3 import AzureDataExplorerTableDataset + from .oracle_service_cloud_object_dataset_py3 import OracleServiceCloudObjectDataset + from .dynamics_ax_resource_dataset_py3 import DynamicsAXResourceDataset from .responsys_object_dataset_py3 import ResponsysObjectDataset from .salesforce_marketing_cloud_object_dataset_py3 import SalesforceMarketingCloudObjectDataset from .vertica_table_dataset_py3 import VerticaTableDataset @@ -187,6 +210,7 @@ from .phoenix_object_dataset_py3 import PhoenixObjectDataset from .paypal_object_dataset_py3 import PaypalObjectDataset from .marketo_object_dataset_py3 import MarketoObjectDataset + from .azure_maria_db_table_dataset_py3 import AzureMariaDBTableDataset from .maria_db_table_dataset_py3 import MariaDBTableDataset from .magento_object_dataset_py3 import MagentoObjectDataset from .jira_object_dataset_py3 import JiraObjectDataset @@ -216,25 +240,62 @@ from .http_dataset_py3 import HttpDataset from .azure_search_index_dataset_py3 import AzureSearchIndexDataset from .web_table_dataset_py3 import WebTableDataset + from .sap_table_resource_dataset_py3 import SapTableResourceDataset + from .rest_resource_dataset_py3 import RestResourceDataset from .sql_server_table_dataset_py3 import SqlServerTableDataset + from 
.sap_open_hub_table_dataset_py3 import SapOpenHubTableDataset + from .sap_hana_table_dataset_py3 import SapHanaTableDataset from .sap_ecc_resource_dataset_py3 import SapEccResourceDataset from .sap_cloud_for_customer_resource_dataset_py3 import SapCloudForCustomerResourceDataset + from .sap_bw_cube_dataset_py3 import SapBwCubeDataset + from .sybase_table_dataset_py3 import SybaseTableDataset + from .salesforce_service_cloud_object_dataset_py3 import SalesforceServiceCloudObjectDataset from .salesforce_object_dataset_py3 import SalesforceObjectDataset + from .microsoft_access_table_dataset_py3 import MicrosoftAccessTableDataset + from .postgre_sql_table_dataset_py3 import PostgreSqlTableDataset + from .my_sql_table_dataset_py3 import MySqlTableDataset + from .odbc_table_dataset_py3 import OdbcTableDataset + from .informix_table_dataset_py3 import InformixTableDataset from .relational_table_dataset_py3 import RelationalTableDataset + from .db2_table_dataset_py3 import Db2TableDataset + from .amazon_redshift_table_dataset_py3 import AmazonRedshiftTableDataset from .azure_my_sql_table_dataset_py3 import AzureMySqlTableDataset + from .teradata_table_dataset_py3 import TeradataTableDataset from .oracle_table_dataset_py3 import OracleTableDataset from .odata_resource_dataset_py3 import ODataResourceDataset + from .cosmos_db_mongo_db_api_collection_dataset_py3 import CosmosDbMongoDbApiCollectionDataset + from .mongo_db_v2_collection_dataset_py3 import MongoDbV2CollectionDataset from .mongo_db_collection_dataset_py3 import MongoDbCollectionDataset from .file_share_dataset_py3 import FileShareDataset + from .office365_dataset_py3 import Office365Dataset + from .azure_blob_fs_dataset_py3 import AzureBlobFSDataset from .azure_data_lake_store_dataset_py3 import AzureDataLakeStoreDataset + from .common_data_service_for_apps_entity_dataset_py3 import CommonDataServiceForAppsEntityDataset + from .dynamics_crm_entity_dataset_py3 import DynamicsCrmEntityDataset from .dynamics_entity_dataset_py3 import DynamicsEntityDataset from .document_db_collection_dataset_py3 import DocumentDbCollectionDataset from .custom_dataset_py3 import CustomDataset from .cassandra_table_dataset_py3 import CassandraTableDataset from .azure_sql_dw_table_dataset_py3 import AzureSqlDWTableDataset + from .azure_sql_mi_table_dataset_py3 import AzureSqlMITableDataset from .azure_sql_table_dataset_py3 import AzureSqlTableDataset from .azure_table_dataset_py3 import AzureTableDataset from .azure_blob_dataset_py3 import AzureBlobDataset + from .hdfs_location_py3 import HdfsLocation + from .http_server_location_py3 import HttpServerLocation + from .sftp_location_py3 import SftpLocation + from .ftp_server_location_py3 import FtpServerLocation + from .file_server_location_py3 import FileServerLocation + from .amazon_s3_location_py3 import AmazonS3Location + from .azure_data_lake_store_location_py3 import AzureDataLakeStoreLocation + from .azure_blob_fs_location_py3 import AzureBlobFSLocation + from .azure_blob_storage_location_py3 import AzureBlobStorageLocation + from .dataset_location_py3 import DatasetLocation + from .binary_dataset_py3 import BinaryDataset + from .json_dataset_py3 import JsonDataset + from .delimited_text_dataset_py3 import DelimitedTextDataset + from .parquet_dataset_py3 import ParquetDataset + from .avro_dataset_py3 import AvroDataset from .amazon_s3_dataset_py3 import AmazonS3Dataset from .activity_policy_py3 import ActivityPolicy from .azure_function_activity_py3 import AzureFunctionActivity @@ -250,9 +311,13 @@ from 
.web_activity_py3 import WebActivity from .redshift_unload_settings_py3 import RedshiftUnloadSettings from .amazon_redshift_source_py3 import AmazonRedshiftSource + from .google_ad_words_source_py3 import GoogleAdWordsSource + from .oracle_service_cloud_source_py3 import OracleServiceCloudSource + from .dynamics_ax_source_py3 import DynamicsAXSource from .responsys_source_py3 import ResponsysSource from .salesforce_marketing_cloud_source_py3 import SalesforceMarketingCloudSource from .vertica_source_py3 import VerticaSource + from .netezza_partition_settings_py3 import NetezzaPartitionSettings from .netezza_source_py3 import NetezzaSource from .zoho_source_py3 import ZohoSource from .xero_source_py3 import XeroSource @@ -265,6 +330,7 @@ from .phoenix_source_py3 import PhoenixSource from .paypal_source_py3 import PaypalSource from .marketo_source_py3 import MarketoSource + from .azure_maria_db_source_py3 import AzureMariaDBSource from .maria_db_source_py3 import MariaDBSource from .magento_source_py3 import MagentoSource from .jira_source_py3 import JiraSource @@ -281,33 +347,82 @@ from .azure_postgre_sql_source_py3 import AzurePostgreSqlSource from .amazon_mws_source_py3 import AmazonMWSSource from .http_source_py3 import HttpSource + from .azure_blob_fs_source_py3 import AzureBlobFSSource from .azure_data_lake_store_source_py3 import AzureDataLakeStoreSource + from .office365_source_py3 import Office365Source + from .mongo_db_cursor_methods_properties_py3 import MongoDbCursorMethodsProperties + from .cosmos_db_mongo_db_api_source_py3 import CosmosDbMongoDbApiSource + from .mongo_db_v2_source_py3 import MongoDbV2Source from .mongo_db_source_py3 import MongoDbSource from .cassandra_source_py3 import CassandraSource from .web_source_py3 import WebSource + from .teradata_partition_settings_py3 import TeradataPartitionSettings + from .teradata_source_py3 import TeradataSource + from .oracle_partition_settings_py3 import OraclePartitionSettings from .oracle_source_py3 import OracleSource + from .azure_data_explorer_source_py3 import AzureDataExplorerSource from .azure_my_sql_source_py3 import AzureMySqlSource from .distcp_settings_py3 import DistcpSettings from .hdfs_source_py3 import HdfsSource from .file_system_source_py3 import FileSystemSource from .sql_dw_source_py3 import SqlDWSource from .stored_procedure_parameter_py3 import StoredProcedureParameter + from .sql_mi_source_py3 import SqlMISource + from .azure_sql_source_py3 import AzureSqlSource + from .sql_server_source_py3 import SqlServerSource from .sql_source_py3 import SqlSource + from .rest_source_py3 import RestSource + from .sap_table_partition_settings_py3 import SapTablePartitionSettings + from .sap_table_source_py3 import SapTableSource + from .sap_open_hub_source_py3 import SapOpenHubSource + from .sap_hana_source_py3 import SapHanaSource from .sap_ecc_source_py3 import SapEccSource from .sap_cloud_for_customer_source_py3 import SapCloudForCustomerSource + from .salesforce_service_cloud_source_py3 import SalesforceServiceCloudSource from .salesforce_source_py3 import SalesforceSource + from .odata_source_py3 import ODataSource + from .sap_bw_source_py3 import SapBwSource + from .sybase_source_py3 import SybaseSource + from .postgre_sql_source_py3 import PostgreSqlSource + from .my_sql_source_py3 import MySqlSource + from .odbc_source_py3 import OdbcSource + from .db2_source_py3 import Db2Source + from .microsoft_access_source_py3 import MicrosoftAccessSource + from .informix_source_py3 import InformixSource from 
.relational_source_py3 import RelationalSource + from .common_data_service_for_apps_source_py3 import CommonDataServiceForAppsSource + from .dynamics_crm_source_py3 import DynamicsCrmSource from .dynamics_source_py3 import DynamicsSource from .document_db_collection_source_py3 import DocumentDbCollectionSource from .blob_source_py3 import BlobSource from .azure_table_source_py3 import AzureTableSource + from .hdfs_read_settings_py3 import HdfsReadSettings + from .http_read_settings_py3 import HttpReadSettings + from .sftp_read_settings_py3 import SftpReadSettings + from .ftp_read_settings_py3 import FtpReadSettings + from .file_server_read_settings_py3 import FileServerReadSettings + from .amazon_s3_read_settings_py3 import AmazonS3ReadSettings + from .azure_data_lake_store_read_settings_py3 import AzureDataLakeStoreReadSettings + from .azure_blob_fs_read_settings_py3 import AzureBlobFSReadSettings + from .azure_blob_storage_read_settings_py3 import AzureBlobStorageReadSettings + from .store_read_settings_py3 import StoreReadSettings + from .binary_source_py3 import BinarySource + from .json_source_py3 import JsonSource + from .format_read_settings_py3 import FormatReadSettings + from .delimited_text_read_settings_py3 import DelimitedTextReadSettings + from .delimited_text_source_py3 import DelimitedTextSource + from .parquet_source_py3 import ParquetSource + from .avro_source_py3 import AvroSource from .copy_source_py3 import CopySource from .lookup_activity_py3 import LookupActivity + from .azure_data_explorer_command_activity_py3 import AzureDataExplorerCommandActivity from .log_storage_settings_py3 import LogStorageSettings from .delete_activity_py3 import DeleteActivity from .sql_server_stored_procedure_activity_py3 import SqlServerStoredProcedureActivity from .custom_activity_reference_object_py3 import CustomActivityReferenceObject from .custom_activity_py3 import CustomActivity + from .ssis_access_credential_py3 import SSISAccessCredential + from .ssis_log_location_py3 import SSISLogLocation from .ssis_property_override_py3 import SSISPropertyOverride from .ssis_execution_parameter_py3 import SSISExecutionParameter from .ssis_execution_credential_py3 import SSISExecutionCredential @@ -322,27 +437,56 @@ from .staging_settings_py3 import StagingSettings from .tabular_translator_py3 import TabularTranslator from .copy_translator_py3 import CopyTranslator + from .cosmos_db_mongo_db_api_sink_py3 import CosmosDbMongoDbApiSink + from .salesforce_service_cloud_sink_py3 import SalesforceServiceCloudSink from .salesforce_sink_py3 import SalesforceSink + from .azure_data_explorer_sink_py3 import AzureDataExplorerSink + from .common_data_service_for_apps_sink_py3 import CommonDataServiceForAppsSink + from .dynamics_crm_sink_py3 import DynamicsCrmSink from .dynamics_sink_py3 import DynamicsSink + from .microsoft_access_sink_py3 import MicrosoftAccessSink + from .informix_sink_py3 import InformixSink from .odbc_sink_py3 import OdbcSink from .azure_search_index_sink_py3 import AzureSearchIndexSink + from .azure_blob_fs_sink_py3 import AzureBlobFSSink from .azure_data_lake_store_sink_py3 import AzureDataLakeStoreSink from .oracle_sink_py3 import OracleSink from .polybase_settings_py3 import PolybaseSettings from .sql_dw_sink_py3 import SqlDWSink + from .sql_mi_sink_py3 import SqlMISink + from .azure_sql_sink_py3 import AzureSqlSink + from .sql_server_sink_py3 import SqlServerSink from .sql_sink_py3 import SqlSink from .document_db_collection_sink_py3 import DocumentDbCollectionSink from 
.file_system_sink_py3 import FileSystemSink from .blob_sink_py3 import BlobSink + from .file_server_write_settings_py3 import FileServerWriteSettings + from .azure_data_lake_store_write_settings_py3 import AzureDataLakeStoreWriteSettings + from .azure_blob_fs_write_settings_py3 import AzureBlobFSWriteSettings + from .azure_blob_storage_write_settings_py3 import AzureBlobStorageWriteSettings + from .store_write_settings_py3 import StoreWriteSettings + from .binary_sink_py3 import BinarySink + from .parquet_sink_py3 import ParquetSink + from .json_write_settings_py3 import JsonWriteSettings + from .delimited_text_write_settings_py3 import DelimitedTextWriteSettings + from .format_write_settings_py3 import FormatWriteSettings + from .avro_write_settings_py3 import AvroWriteSettings + from .avro_sink_py3 import AvroSink from .azure_table_sink_py3 import AzureTableSink from .azure_queue_sink_py3 import AzureQueueSink from .sap_cloud_for_customer_sink_py3 import SapCloudForCustomerSink + from .azure_my_sql_sink_py3 import AzureMySqlSink + from .azure_postgre_sql_sink_py3 import AzurePostgreSqlSink + from .json_sink_py3 import JsonSink + from .delimited_text_sink_py3 import DelimitedTextSink from .copy_sink_py3 import CopySink from .copy_activity_py3 import CopyActivity from .execution_activity_py3 import ExecutionActivity + from .web_hook_activity_py3 import WebHookActivity from .append_variable_activity_py3 import AppendVariableActivity from .set_variable_activity_py3 import SetVariableActivity from .filter_activity_py3 import FilterActivity + from .validation_activity_py3 import ValidationActivity from .until_activity_py3 import UntilActivity from .wait_activity_py3 import WaitActivity from .for_each_activity_py3 import ForEachActivity @@ -360,6 +504,8 @@ from .linked_integration_runtime_key_authorization_py3 import LinkedIntegrationRuntimeKeyAuthorization from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType from .self_hosted_integration_runtime_py3 import SelfHostedIntegrationRuntime + from .entity_reference_py3 import EntityReference + from .integration_runtime_data_proxy_properties_py3 import IntegrationRuntimeDataProxyProperties from .integration_runtime_custom_setup_script_properties_py3 import IntegrationRuntimeCustomSetupScriptProperties from .integration_runtime_ssis_catalog_info_py3 import IntegrationRuntimeSsisCatalogInfo from .integration_runtime_ssis_properties_py3 import IntegrationRuntimeSsisProperties @@ -367,6 +513,13 @@ from .integration_runtime_compute_properties_py3 import IntegrationRuntimeComputeProperties from .managed_integration_runtime_py3 import ManagedIntegrationRuntime from .integration_runtime_node_ip_address_py3 import IntegrationRuntimeNodeIpAddress + from .ssis_variable_py3 import SsisVariable + from .ssis_environment_py3 import SsisEnvironment + from .ssis_parameter_py3 import SsisParameter + from .ssis_package_py3 import SsisPackage + from .ssis_environment_reference_py3 import SsisEnvironmentReference + from .ssis_project_py3 import SsisProject + from .ssis_folder_py3 import SsisFolder from .ssis_object_metadata_py3 import SsisObjectMetadata from .ssis_object_metadata_list_response_py3 import SsisObjectMetadataListResponse from .integration_runtime_node_monitoring_data_py3 import IntegrationRuntimeNodeMonitoringData @@ -410,6 +563,7 @@ from .trigger import Trigger from .trigger_resource import TriggerResource from .create_run_response import CreateRunResponse + from .trigger_subscription_operation_status import 
TriggerSubscriptionOperationStatus from .factory_vsts_configuration import FactoryVSTSConfiguration from .factory_git_hub_configuration import FactoryGitHubConfiguration from .factory_repo_update import FactoryRepoUpdate @@ -460,6 +614,11 @@ from .schedule_trigger import ScheduleTrigger from .multiple_pipeline_trigger import MultiplePipelineTrigger from .azure_function_linked_service import AzureFunctionLinkedService + from .azure_data_explorer_linked_service import AzureDataExplorerLinkedService + from .sap_table_linked_service import SapTableLinkedService + from .google_ad_words_linked_service import GoogleAdWordsLinkedService + from .oracle_service_cloud_linked_service import OracleServiceCloudLinkedService + from .dynamics_ax_linked_service import DynamicsAXLinkedService from .responsys_linked_service import ResponsysLinkedService from .azure_databricks_linked_service import AzureDatabricksLinkedService from .azure_data_lake_analytics_linked_service import AzureDataLakeAnalyticsLinkedService @@ -479,6 +638,7 @@ from .phoenix_linked_service import PhoenixLinkedService from .paypal_linked_service import PaypalLinkedService from .marketo_linked_service import MarketoLinkedService + from .azure_maria_db_linked_service import AzureMariaDBLinkedService from .maria_db_linked_service import MariaDBLinkedService from .magento_linked_service import MagentoLinkedService from .jira_linked_service import JiraLinkedService @@ -503,10 +663,17 @@ from .custom_data_source_linked_service import CustomDataSourceLinkedService from .amazon_redshift_linked_service import AmazonRedshiftLinkedService from .amazon_s3_linked_service import AmazonS3LinkedService + from .rest_service_linked_service import RestServiceLinkedService + from .sap_open_hub_linked_service import SapOpenHubLinkedService from .sap_ecc_linked_service import SapEccLinkedService from .sap_cloud_for_customer_linked_service import SapCloudForCustomerLinkedService + from .salesforce_service_cloud_linked_service import SalesforceServiceCloudLinkedService from .salesforce_linked_service import SalesforceLinkedService + from .office365_linked_service import Office365LinkedService + from .azure_blob_fs_linked_service import AzureBlobFSLinkedService from .azure_data_lake_store_linked_service import AzureDataLakeStoreLinkedService + from .cosmos_db_mongo_db_api_linked_service import CosmosDbMongoDbApiLinkedService + from .mongo_db_v2_linked_service import MongoDbV2LinkedService from .mongo_db_linked_service import MongoDbLinkedService from .cassandra_linked_service import CassandraLinkedService from .web_client_certificate_authentication import WebClientCertificateAuthentication @@ -516,6 +683,8 @@ from .web_linked_service import WebLinkedService from .odata_linked_service import ODataLinkedService from .hdfs_linked_service import HdfsLinkedService + from .microsoft_access_linked_service import MicrosoftAccessLinkedService + from .informix_linked_service import InformixLinkedService from .odbc_linked_service import OdbcLinkedService from .azure_ml_linked_service import AzureMLLinkedService from .teradata_linked_service import TeradataLinkedService @@ -527,16 +696,23 @@ from .oracle_linked_service import OracleLinkedService from .file_server_linked_service import FileServerLinkedService from .hd_insight_linked_service import HDInsightLinkedService + from .common_data_service_for_apps_linked_service import CommonDataServiceForAppsLinkedService + from .dynamics_crm_linked_service import DynamicsCrmLinkedService from .dynamics_linked_service import 
DynamicsLinkedService from .cosmos_db_linked_service import CosmosDbLinkedService from .azure_key_vault_linked_service import AzureKeyVaultLinkedService from .azure_batch_linked_service import AzureBatchLinkedService + from .azure_sql_mi_linked_service import AzureSqlMILinkedService from .azure_sql_database_linked_service import AzureSqlDatabaseLinkedService from .sql_server_linked_service import SqlServerLinkedService from .azure_sql_dw_linked_service import AzureSqlDWLinkedService from .azure_table_storage_linked_service import AzureTableStorageLinkedService from .azure_blob_storage_linked_service import AzureBlobStorageLinkedService from .azure_storage_linked_service import AzureStorageLinkedService + from .google_ad_words_object_dataset import GoogleAdWordsObjectDataset + from .azure_data_explorer_table_dataset import AzureDataExplorerTableDataset + from .oracle_service_cloud_object_dataset import OracleServiceCloudObjectDataset + from .dynamics_ax_resource_dataset import DynamicsAXResourceDataset from .responsys_object_dataset import ResponsysObjectDataset from .salesforce_marketing_cloud_object_dataset import SalesforceMarketingCloudObjectDataset from .vertica_table_dataset import VerticaTableDataset @@ -552,6 +728,7 @@ from .phoenix_object_dataset import PhoenixObjectDataset from .paypal_object_dataset import PaypalObjectDataset from .marketo_object_dataset import MarketoObjectDataset + from .azure_maria_db_table_dataset import AzureMariaDBTableDataset from .maria_db_table_dataset import MariaDBTableDataset from .magento_object_dataset import MagentoObjectDataset from .jira_object_dataset import JiraObjectDataset @@ -581,25 +758,62 @@ from .http_dataset import HttpDataset from .azure_search_index_dataset import AzureSearchIndexDataset from .web_table_dataset import WebTableDataset + from .sap_table_resource_dataset import SapTableResourceDataset + from .rest_resource_dataset import RestResourceDataset from .sql_server_table_dataset import SqlServerTableDataset + from .sap_open_hub_table_dataset import SapOpenHubTableDataset + from .sap_hana_table_dataset import SapHanaTableDataset from .sap_ecc_resource_dataset import SapEccResourceDataset from .sap_cloud_for_customer_resource_dataset import SapCloudForCustomerResourceDataset + from .sap_bw_cube_dataset import SapBwCubeDataset + from .sybase_table_dataset import SybaseTableDataset + from .salesforce_service_cloud_object_dataset import SalesforceServiceCloudObjectDataset from .salesforce_object_dataset import SalesforceObjectDataset + from .microsoft_access_table_dataset import MicrosoftAccessTableDataset + from .postgre_sql_table_dataset import PostgreSqlTableDataset + from .my_sql_table_dataset import MySqlTableDataset + from .odbc_table_dataset import OdbcTableDataset + from .informix_table_dataset import InformixTableDataset from .relational_table_dataset import RelationalTableDataset + from .db2_table_dataset import Db2TableDataset + from .amazon_redshift_table_dataset import AmazonRedshiftTableDataset from .azure_my_sql_table_dataset import AzureMySqlTableDataset + from .teradata_table_dataset import TeradataTableDataset from .oracle_table_dataset import OracleTableDataset from .odata_resource_dataset import ODataResourceDataset + from .cosmos_db_mongo_db_api_collection_dataset import CosmosDbMongoDbApiCollectionDataset + from .mongo_db_v2_collection_dataset import MongoDbV2CollectionDataset from .mongo_db_collection_dataset import MongoDbCollectionDataset from .file_share_dataset import FileShareDataset + from 
.office365_dataset import Office365Dataset + from .azure_blob_fs_dataset import AzureBlobFSDataset from .azure_data_lake_store_dataset import AzureDataLakeStoreDataset + from .common_data_service_for_apps_entity_dataset import CommonDataServiceForAppsEntityDataset + from .dynamics_crm_entity_dataset import DynamicsCrmEntityDataset from .dynamics_entity_dataset import DynamicsEntityDataset from .document_db_collection_dataset import DocumentDbCollectionDataset from .custom_dataset import CustomDataset from .cassandra_table_dataset import CassandraTableDataset from .azure_sql_dw_table_dataset import AzureSqlDWTableDataset + from .azure_sql_mi_table_dataset import AzureSqlMITableDataset from .azure_sql_table_dataset import AzureSqlTableDataset from .azure_table_dataset import AzureTableDataset from .azure_blob_dataset import AzureBlobDataset + from .hdfs_location import HdfsLocation + from .http_server_location import HttpServerLocation + from .sftp_location import SftpLocation + from .ftp_server_location import FtpServerLocation + from .file_server_location import FileServerLocation + from .amazon_s3_location import AmazonS3Location + from .azure_data_lake_store_location import AzureDataLakeStoreLocation + from .azure_blob_fs_location import AzureBlobFSLocation + from .azure_blob_storage_location import AzureBlobStorageLocation + from .dataset_location import DatasetLocation + from .binary_dataset import BinaryDataset + from .json_dataset import JsonDataset + from .delimited_text_dataset import DelimitedTextDataset + from .parquet_dataset import ParquetDataset + from .avro_dataset import AvroDataset from .amazon_s3_dataset import AmazonS3Dataset from .activity_policy import ActivityPolicy from .azure_function_activity import AzureFunctionActivity @@ -615,9 +829,13 @@ from .web_activity import WebActivity from .redshift_unload_settings import RedshiftUnloadSettings from .amazon_redshift_source import AmazonRedshiftSource + from .google_ad_words_source import GoogleAdWordsSource + from .oracle_service_cloud_source import OracleServiceCloudSource + from .dynamics_ax_source import DynamicsAXSource from .responsys_source import ResponsysSource from .salesforce_marketing_cloud_source import SalesforceMarketingCloudSource from .vertica_source import VerticaSource + from .netezza_partition_settings import NetezzaPartitionSettings from .netezza_source import NetezzaSource from .zoho_source import ZohoSource from .xero_source import XeroSource @@ -630,6 +848,7 @@ from .phoenix_source import PhoenixSource from .paypal_source import PaypalSource from .marketo_source import MarketoSource + from .azure_maria_db_source import AzureMariaDBSource from .maria_db_source import MariaDBSource from .magento_source import MagentoSource from .jira_source import JiraSource @@ -646,33 +865,82 @@ from .azure_postgre_sql_source import AzurePostgreSqlSource from .amazon_mws_source import AmazonMWSSource from .http_source import HttpSource + from .azure_blob_fs_source import AzureBlobFSSource from .azure_data_lake_store_source import AzureDataLakeStoreSource + from .office365_source import Office365Source + from .mongo_db_cursor_methods_properties import MongoDbCursorMethodsProperties + from .cosmos_db_mongo_db_api_source import CosmosDbMongoDbApiSource + from .mongo_db_v2_source import MongoDbV2Source from .mongo_db_source import MongoDbSource from .cassandra_source import CassandraSource from .web_source import WebSource + from .teradata_partition_settings import TeradataPartitionSettings + from .teradata_source import 
TeradataSource + from .oracle_partition_settings import OraclePartitionSettings from .oracle_source import OracleSource + from .azure_data_explorer_source import AzureDataExplorerSource from .azure_my_sql_source import AzureMySqlSource from .distcp_settings import DistcpSettings from .hdfs_source import HdfsSource from .file_system_source import FileSystemSource from .sql_dw_source import SqlDWSource from .stored_procedure_parameter import StoredProcedureParameter + from .sql_mi_source import SqlMISource + from .azure_sql_source import AzureSqlSource + from .sql_server_source import SqlServerSource from .sql_source import SqlSource + from .rest_source import RestSource + from .sap_table_partition_settings import SapTablePartitionSettings + from .sap_table_source import SapTableSource + from .sap_open_hub_source import SapOpenHubSource + from .sap_hana_source import SapHanaSource from .sap_ecc_source import SapEccSource from .sap_cloud_for_customer_source import SapCloudForCustomerSource + from .salesforce_service_cloud_source import SalesforceServiceCloudSource from .salesforce_source import SalesforceSource + from .odata_source import ODataSource + from .sap_bw_source import SapBwSource + from .sybase_source import SybaseSource + from .postgre_sql_source import PostgreSqlSource + from .my_sql_source import MySqlSource + from .odbc_source import OdbcSource + from .db2_source import Db2Source + from .microsoft_access_source import MicrosoftAccessSource + from .informix_source import InformixSource from .relational_source import RelationalSource + from .common_data_service_for_apps_source import CommonDataServiceForAppsSource + from .dynamics_crm_source import DynamicsCrmSource from .dynamics_source import DynamicsSource from .document_db_collection_source import DocumentDbCollectionSource from .blob_source import BlobSource from .azure_table_source import AzureTableSource + from .hdfs_read_settings import HdfsReadSettings + from .http_read_settings import HttpReadSettings + from .sftp_read_settings import SftpReadSettings + from .ftp_read_settings import FtpReadSettings + from .file_server_read_settings import FileServerReadSettings + from .amazon_s3_read_settings import AmazonS3ReadSettings + from .azure_data_lake_store_read_settings import AzureDataLakeStoreReadSettings + from .azure_blob_fs_read_settings import AzureBlobFSReadSettings + from .azure_blob_storage_read_settings import AzureBlobStorageReadSettings + from .store_read_settings import StoreReadSettings + from .binary_source import BinarySource + from .json_source import JsonSource + from .format_read_settings import FormatReadSettings + from .delimited_text_read_settings import DelimitedTextReadSettings + from .delimited_text_source import DelimitedTextSource + from .parquet_source import ParquetSource + from .avro_source import AvroSource from .copy_source import CopySource from .lookup_activity import LookupActivity + from .azure_data_explorer_command_activity import AzureDataExplorerCommandActivity from .log_storage_settings import LogStorageSettings from .delete_activity import DeleteActivity from .sql_server_stored_procedure_activity import SqlServerStoredProcedureActivity from .custom_activity_reference_object import CustomActivityReferenceObject from .custom_activity import CustomActivity + from .ssis_access_credential import SSISAccessCredential + from .ssis_log_location import SSISLogLocation from .ssis_property_override import SSISPropertyOverride from .ssis_execution_parameter import SSISExecutionParameter from 
.ssis_execution_credential import SSISExecutionCredential @@ -687,27 +955,56 @@ from .staging_settings import StagingSettings from .tabular_translator import TabularTranslator from .copy_translator import CopyTranslator + from .cosmos_db_mongo_db_api_sink import CosmosDbMongoDbApiSink + from .salesforce_service_cloud_sink import SalesforceServiceCloudSink from .salesforce_sink import SalesforceSink + from .azure_data_explorer_sink import AzureDataExplorerSink + from .common_data_service_for_apps_sink import CommonDataServiceForAppsSink + from .dynamics_crm_sink import DynamicsCrmSink from .dynamics_sink import DynamicsSink + from .microsoft_access_sink import MicrosoftAccessSink + from .informix_sink import InformixSink from .odbc_sink import OdbcSink from .azure_search_index_sink import AzureSearchIndexSink + from .azure_blob_fs_sink import AzureBlobFSSink from .azure_data_lake_store_sink import AzureDataLakeStoreSink from .oracle_sink import OracleSink from .polybase_settings import PolybaseSettings from .sql_dw_sink import SqlDWSink + from .sql_mi_sink import SqlMISink + from .azure_sql_sink import AzureSqlSink + from .sql_server_sink import SqlServerSink from .sql_sink import SqlSink from .document_db_collection_sink import DocumentDbCollectionSink from .file_system_sink import FileSystemSink from .blob_sink import BlobSink + from .file_server_write_settings import FileServerWriteSettings + from .azure_data_lake_store_write_settings import AzureDataLakeStoreWriteSettings + from .azure_blob_fs_write_settings import AzureBlobFSWriteSettings + from .azure_blob_storage_write_settings import AzureBlobStorageWriteSettings + from .store_write_settings import StoreWriteSettings + from .binary_sink import BinarySink + from .parquet_sink import ParquetSink + from .json_write_settings import JsonWriteSettings + from .delimited_text_write_settings import DelimitedTextWriteSettings + from .format_write_settings import FormatWriteSettings + from .avro_write_settings import AvroWriteSettings + from .avro_sink import AvroSink from .azure_table_sink import AzureTableSink from .azure_queue_sink import AzureQueueSink from .sap_cloud_for_customer_sink import SapCloudForCustomerSink + from .azure_my_sql_sink import AzureMySqlSink + from .azure_postgre_sql_sink import AzurePostgreSqlSink + from .json_sink import JsonSink + from .delimited_text_sink import DelimitedTextSink from .copy_sink import CopySink from .copy_activity import CopyActivity from .execution_activity import ExecutionActivity + from .web_hook_activity import WebHookActivity from .append_variable_activity import AppendVariableActivity from .set_variable_activity import SetVariableActivity from .filter_activity import FilterActivity + from .validation_activity import ValidationActivity from .until_activity import UntilActivity from .wait_activity import WaitActivity from .for_each_activity import ForEachActivity @@ -725,6 +1022,8 @@ from .linked_integration_runtime_key_authorization import LinkedIntegrationRuntimeKeyAuthorization from .linked_integration_runtime_type import LinkedIntegrationRuntimeType from .self_hosted_integration_runtime import SelfHostedIntegrationRuntime + from .entity_reference import EntityReference + from .integration_runtime_data_proxy_properties import IntegrationRuntimeDataProxyProperties from .integration_runtime_custom_setup_script_properties import IntegrationRuntimeCustomSetupScriptProperties from .integration_runtime_ssis_catalog_info import IntegrationRuntimeSsisCatalogInfo from 
.integration_runtime_ssis_properties import IntegrationRuntimeSsisProperties @@ -732,6 +1031,13 @@ from .integration_runtime_compute_properties import IntegrationRuntimeComputeProperties from .managed_integration_runtime import ManagedIntegrationRuntime from .integration_runtime_node_ip_address import IntegrationRuntimeNodeIpAddress + from .ssis_variable import SsisVariable + from .ssis_environment import SsisEnvironment + from .ssis_parameter import SsisParameter + from .ssis_package import SsisPackage + from .ssis_environment_reference import SsisEnvironmentReference + from .ssis_project import SsisProject + from .ssis_folder import SsisFolder from .ssis_object_metadata import SsisObjectMetadata from .ssis_object_metadata_list_response import SsisObjectMetadataListResponse from .integration_runtime_node_monitoring_data import IntegrationRuntimeNodeMonitoringData @@ -754,6 +1060,7 @@ DependencyCondition, VariableType, TriggerRuntimeState, + EventSubscriptionStatus, RunQueryFilterOperand, RunQueryFilterOperator, RunQueryOrderByField, @@ -764,6 +1071,7 @@ DayOfWeek, DaysOfWeek, RecurrenceFrequency, + GoogleAdWordsAuthenticationType, SparkServerType, SparkThriftTransportProtocol, SparkAuthenticationType, @@ -780,29 +1088,39 @@ SftpAuthenticationType, FtpAuthenticationType, HttpAuthenticationType, + RestServiceAuthenticationType, MongoDbAuthenticationType, ODataAuthenticationType, + ODataAadServicePrincipalCredentialType, TeradataAuthenticationType, Db2AuthenticationType, SybaseAuthenticationType, - DatasetCompressionLevel, - JsonFormatFilePattern, + DynamicsDeploymentType, + DynamicsAuthenticationType, + AvroCompressionCodec, AzureFunctionActivityMethod, WebActivityMethod, + NetezzaPartitionOption, CassandraSourceReadConsistencyLevels, + TeradataPartitionOption, + OraclePartitionOption, StoredProcedureParameterType, + SapTablePartitionOption, SalesforceSourceReadBehavior, + SsisPackageLocationType, HDInsightActivityDebugInfoOption, SalesforceSinkWriteBehavior, AzureSearchIndexWriteBehaviorType, - CopyBehaviorType, PolybaseSettingsRejectType, + JsonWriteFilePattern, SapCloudForCustomerSinkWriteBehavior, + WebHookActivityMethod, IntegrationRuntimeType, SelfHostedIntegrationRuntimeNodeStatus, IntegrationRuntimeUpdateResult, IntegrationRuntimeInternalChannelEncryptionMode, ManagedIntegrationRuntimeNodeStatus, + IntegrationRuntimeEntityReferenceType, IntegrationRuntimeSsisCatalogPricingTier, IntegrationRuntimeLicenseType, IntegrationRuntimeEdition, @@ -846,6 +1164,7 @@ 'Trigger', 'TriggerResource', 'CreateRunResponse', + 'TriggerSubscriptionOperationStatus', 'FactoryVSTSConfiguration', 'FactoryGitHubConfiguration', 'FactoryRepoUpdate', @@ -896,6 +1215,11 @@ 'ScheduleTrigger', 'MultiplePipelineTrigger', 'AzureFunctionLinkedService', + 'AzureDataExplorerLinkedService', + 'SapTableLinkedService', + 'GoogleAdWordsLinkedService', + 'OracleServiceCloudLinkedService', + 'DynamicsAXLinkedService', 'ResponsysLinkedService', 'AzureDatabricksLinkedService', 'AzureDataLakeAnalyticsLinkedService', @@ -915,6 +1239,7 @@ 'PhoenixLinkedService', 'PaypalLinkedService', 'MarketoLinkedService', + 'AzureMariaDBLinkedService', 'MariaDBLinkedService', 'MagentoLinkedService', 'JiraLinkedService', @@ -939,10 +1264,17 @@ 'CustomDataSourceLinkedService', 'AmazonRedshiftLinkedService', 'AmazonS3LinkedService', + 'RestServiceLinkedService', + 'SapOpenHubLinkedService', 'SapEccLinkedService', 'SapCloudForCustomerLinkedService', + 'SalesforceServiceCloudLinkedService', 'SalesforceLinkedService', + 'Office365LinkedService', + 
'AzureBlobFSLinkedService', 'AzureDataLakeStoreLinkedService', + 'CosmosDbMongoDbApiLinkedService', + 'MongoDbV2LinkedService', 'MongoDbLinkedService', 'CassandraLinkedService', 'WebClientCertificateAuthentication', @@ -952,6 +1284,8 @@ 'WebLinkedService', 'ODataLinkedService', 'HdfsLinkedService', + 'MicrosoftAccessLinkedService', + 'InformixLinkedService', 'OdbcLinkedService', 'AzureMLLinkedService', 'TeradataLinkedService', @@ -963,16 +1297,23 @@ 'OracleLinkedService', 'FileServerLinkedService', 'HDInsightLinkedService', + 'CommonDataServiceForAppsLinkedService', + 'DynamicsCrmLinkedService', 'DynamicsLinkedService', 'CosmosDbLinkedService', 'AzureKeyVaultLinkedService', 'AzureBatchLinkedService', + 'AzureSqlMILinkedService', 'AzureSqlDatabaseLinkedService', 'SqlServerLinkedService', 'AzureSqlDWLinkedService', 'AzureTableStorageLinkedService', 'AzureBlobStorageLinkedService', 'AzureStorageLinkedService', + 'GoogleAdWordsObjectDataset', + 'AzureDataExplorerTableDataset', + 'OracleServiceCloudObjectDataset', + 'DynamicsAXResourceDataset', 'ResponsysObjectDataset', 'SalesforceMarketingCloudObjectDataset', 'VerticaTableDataset', @@ -988,6 +1329,7 @@ 'PhoenixObjectDataset', 'PaypalObjectDataset', 'MarketoObjectDataset', + 'AzureMariaDBTableDataset', 'MariaDBTableDataset', 'MagentoObjectDataset', 'JiraObjectDataset', @@ -1017,25 +1359,62 @@ 'HttpDataset', 'AzureSearchIndexDataset', 'WebTableDataset', + 'SapTableResourceDataset', + 'RestResourceDataset', 'SqlServerTableDataset', + 'SapOpenHubTableDataset', + 'SapHanaTableDataset', 'SapEccResourceDataset', 'SapCloudForCustomerResourceDataset', + 'SapBwCubeDataset', + 'SybaseTableDataset', + 'SalesforceServiceCloudObjectDataset', 'SalesforceObjectDataset', + 'MicrosoftAccessTableDataset', + 'PostgreSqlTableDataset', + 'MySqlTableDataset', + 'OdbcTableDataset', + 'InformixTableDataset', 'RelationalTableDataset', + 'Db2TableDataset', + 'AmazonRedshiftTableDataset', 'AzureMySqlTableDataset', + 'TeradataTableDataset', 'OracleTableDataset', 'ODataResourceDataset', + 'CosmosDbMongoDbApiCollectionDataset', + 'MongoDbV2CollectionDataset', 'MongoDbCollectionDataset', 'FileShareDataset', + 'Office365Dataset', + 'AzureBlobFSDataset', 'AzureDataLakeStoreDataset', + 'CommonDataServiceForAppsEntityDataset', + 'DynamicsCrmEntityDataset', 'DynamicsEntityDataset', 'DocumentDbCollectionDataset', 'CustomDataset', 'CassandraTableDataset', 'AzureSqlDWTableDataset', + 'AzureSqlMITableDataset', 'AzureSqlTableDataset', 'AzureTableDataset', 'AzureBlobDataset', + 'HdfsLocation', + 'HttpServerLocation', + 'SftpLocation', + 'FtpServerLocation', + 'FileServerLocation', + 'AmazonS3Location', + 'AzureDataLakeStoreLocation', + 'AzureBlobFSLocation', + 'AzureBlobStorageLocation', + 'DatasetLocation', + 'BinaryDataset', + 'JsonDataset', + 'DelimitedTextDataset', + 'ParquetDataset', + 'AvroDataset', 'AmazonS3Dataset', 'ActivityPolicy', 'AzureFunctionActivity', @@ -1051,9 +1430,13 @@ 'WebActivity', 'RedshiftUnloadSettings', 'AmazonRedshiftSource', + 'GoogleAdWordsSource', + 'OracleServiceCloudSource', + 'DynamicsAXSource', 'ResponsysSource', 'SalesforceMarketingCloudSource', 'VerticaSource', + 'NetezzaPartitionSettings', 'NetezzaSource', 'ZohoSource', 'XeroSource', @@ -1066,6 +1449,7 @@ 'PhoenixSource', 'PaypalSource', 'MarketoSource', + 'AzureMariaDBSource', 'MariaDBSource', 'MagentoSource', 'JiraSource', @@ -1082,33 +1466,82 @@ 'AzurePostgreSqlSource', 'AmazonMWSSource', 'HttpSource', + 'AzureBlobFSSource', 'AzureDataLakeStoreSource', + 'Office365Source', + 
'MongoDbCursorMethodsProperties', + 'CosmosDbMongoDbApiSource', + 'MongoDbV2Source', 'MongoDbSource', 'CassandraSource', 'WebSource', + 'TeradataPartitionSettings', + 'TeradataSource', + 'OraclePartitionSettings', 'OracleSource', + 'AzureDataExplorerSource', 'AzureMySqlSource', 'DistcpSettings', 'HdfsSource', 'FileSystemSource', 'SqlDWSource', 'StoredProcedureParameter', + 'SqlMISource', + 'AzureSqlSource', + 'SqlServerSource', 'SqlSource', + 'RestSource', + 'SapTablePartitionSettings', + 'SapTableSource', + 'SapOpenHubSource', + 'SapHanaSource', 'SapEccSource', 'SapCloudForCustomerSource', + 'SalesforceServiceCloudSource', 'SalesforceSource', + 'ODataSource', + 'SapBwSource', + 'SybaseSource', + 'PostgreSqlSource', + 'MySqlSource', + 'OdbcSource', + 'Db2Source', + 'MicrosoftAccessSource', + 'InformixSource', 'RelationalSource', + 'CommonDataServiceForAppsSource', + 'DynamicsCrmSource', 'DynamicsSource', 'DocumentDbCollectionSource', 'BlobSource', 'AzureTableSource', + 'HdfsReadSettings', + 'HttpReadSettings', + 'SftpReadSettings', + 'FtpReadSettings', + 'FileServerReadSettings', + 'AmazonS3ReadSettings', + 'AzureDataLakeStoreReadSettings', + 'AzureBlobFSReadSettings', + 'AzureBlobStorageReadSettings', + 'StoreReadSettings', + 'BinarySource', + 'JsonSource', + 'FormatReadSettings', + 'DelimitedTextReadSettings', + 'DelimitedTextSource', + 'ParquetSource', + 'AvroSource', 'CopySource', 'LookupActivity', + 'AzureDataExplorerCommandActivity', 'LogStorageSettings', 'DeleteActivity', 'SqlServerStoredProcedureActivity', 'CustomActivityReferenceObject', 'CustomActivity', + 'SSISAccessCredential', + 'SSISLogLocation', 'SSISPropertyOverride', 'SSISExecutionParameter', 'SSISExecutionCredential', @@ -1123,27 +1556,56 @@ 'StagingSettings', 'TabularTranslator', 'CopyTranslator', + 'CosmosDbMongoDbApiSink', + 'SalesforceServiceCloudSink', 'SalesforceSink', + 'AzureDataExplorerSink', + 'CommonDataServiceForAppsSink', + 'DynamicsCrmSink', 'DynamicsSink', + 'MicrosoftAccessSink', + 'InformixSink', 'OdbcSink', 'AzureSearchIndexSink', + 'AzureBlobFSSink', 'AzureDataLakeStoreSink', 'OracleSink', 'PolybaseSettings', 'SqlDWSink', + 'SqlMISink', + 'AzureSqlSink', + 'SqlServerSink', 'SqlSink', 'DocumentDbCollectionSink', 'FileSystemSink', 'BlobSink', + 'FileServerWriteSettings', + 'AzureDataLakeStoreWriteSettings', + 'AzureBlobFSWriteSettings', + 'AzureBlobStorageWriteSettings', + 'StoreWriteSettings', + 'BinarySink', + 'ParquetSink', + 'JsonWriteSettings', + 'DelimitedTextWriteSettings', + 'FormatWriteSettings', + 'AvroWriteSettings', + 'AvroSink', 'AzureTableSink', 'AzureQueueSink', 'SapCloudForCustomerSink', + 'AzureMySqlSink', + 'AzurePostgreSqlSink', + 'JsonSink', + 'DelimitedTextSink', 'CopySink', 'CopyActivity', 'ExecutionActivity', + 'WebHookActivity', 'AppendVariableActivity', 'SetVariableActivity', 'FilterActivity', + 'ValidationActivity', 'UntilActivity', 'WaitActivity', 'ForEachActivity', @@ -1161,6 +1623,8 @@ 'LinkedIntegrationRuntimeKeyAuthorization', 'LinkedIntegrationRuntimeType', 'SelfHostedIntegrationRuntime', + 'EntityReference', + 'IntegrationRuntimeDataProxyProperties', 'IntegrationRuntimeCustomSetupScriptProperties', 'IntegrationRuntimeSsisCatalogInfo', 'IntegrationRuntimeSsisProperties', @@ -1168,6 +1632,13 @@ 'IntegrationRuntimeComputeProperties', 'ManagedIntegrationRuntime', 'IntegrationRuntimeNodeIpAddress', + 'SsisVariable', + 'SsisEnvironment', + 'SsisParameter', + 'SsisPackage', + 'SsisEnvironmentReference', + 'SsisProject', + 'SsisFolder', 'SsisObjectMetadata', 
'SsisObjectMetadataListResponse', 'IntegrationRuntimeNodeMonitoringData', @@ -1189,6 +1660,7 @@ 'DependencyCondition', 'VariableType', 'TriggerRuntimeState', + 'EventSubscriptionStatus', 'RunQueryFilterOperand', 'RunQueryFilterOperator', 'RunQueryOrderByField', @@ -1199,6 +1671,7 @@ 'DayOfWeek', 'DaysOfWeek', 'RecurrenceFrequency', + 'GoogleAdWordsAuthenticationType', 'SparkServerType', 'SparkThriftTransportProtocol', 'SparkAuthenticationType', @@ -1215,29 +1688,39 @@ 'SftpAuthenticationType', 'FtpAuthenticationType', 'HttpAuthenticationType', + 'RestServiceAuthenticationType', 'MongoDbAuthenticationType', 'ODataAuthenticationType', + 'ODataAadServicePrincipalCredentialType', 'TeradataAuthenticationType', 'Db2AuthenticationType', 'SybaseAuthenticationType', - 'DatasetCompressionLevel', - 'JsonFormatFilePattern', + 'DynamicsDeploymentType', + 'DynamicsAuthenticationType', + 'AvroCompressionCodec', 'AzureFunctionActivityMethod', 'WebActivityMethod', + 'NetezzaPartitionOption', 'CassandraSourceReadConsistencyLevels', + 'TeradataPartitionOption', + 'OraclePartitionOption', 'StoredProcedureParameterType', + 'SapTablePartitionOption', 'SalesforceSourceReadBehavior', + 'SsisPackageLocationType', 'HDInsightActivityDebugInfoOption', 'SalesforceSinkWriteBehavior', 'AzureSearchIndexWriteBehaviorType', - 'CopyBehaviorType', 'PolybaseSettingsRejectType', + 'JsonWriteFilePattern', 'SapCloudForCustomerSinkWriteBehavior', + 'WebHookActivityMethod', 'IntegrationRuntimeType', 'SelfHostedIntegrationRuntimeNodeStatus', 'IntegrationRuntimeUpdateResult', 'IntegrationRuntimeInternalChannelEncryptionMode', 'ManagedIntegrationRuntimeNodeStatus', + 'IntegrationRuntimeEntityReferenceType', 'IntegrationRuntimeSsisCatalogPricingTier', 'IntegrationRuntimeLicenseType', 'IntegrationRuntimeEdition', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py index 4531b28777c6..b1e5ed533bba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py @@ -29,7 +29,7 @@ class AmazonMWSLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py index 421c20dc2d4a..a8db63933154 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py @@ -29,7 +29,7 @@ class AmazonMWSLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py index 1cabba2201c7..f9d034e610d4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py @@ -27,6 +27,10 @@ class AmazonMWSSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class AmazonMWSSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py index 895281f9af51..9ef7f5b30244 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py @@ -27,6 +27,10 @@ class AmazonMWSSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class AmazonMWSSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'AmazonMWSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py index a85e73b458ae..4272b28c13f5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py @@ -29,7 +29,7 @@ class AmazonRedshiftLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py index 7912ad040946..3b84583c6c86 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py @@ -29,7 +29,7 @@ class AmazonRedshiftLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py index 0fa9a82ff9db..d4fdfa4aa2ba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py @@ -27,6 +27,10 @@ class AmazonRedshiftSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
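With maxConcurrentConnections now surfaced on every copy source, the throttle can be set directly at construction time. A minimal sketch against azure-mgmt-datafactory 0.8.0; the query text is a placeholder:

from azure.mgmt.datafactory.models import AmazonMWSSource

# Copy source with the new throttle; both values also accept ADF expressions.
source = AmazonMWSSource(
    query="SELECT * FROM Orders",  # placeholder connector query
    max_concurrent_connections=4,  # new in 0.8.0: caps parallel connections to the store
)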
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType @@ -48,6 +52,7 @@ class AmazonRedshiftSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py index 9542e56e4850..9b34b2ef5b97 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py @@ -27,6 +27,10 @@ class AmazonRedshiftSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. 
Type: string (or Expression with resultType @@ -48,13 +52,14 @@ class AmazonRedshiftSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, redshift_unload_settings=None, **kwargs) -> None: - super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, redshift_unload_settings=None, **kwargs) -> None: + super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.redshift_unload_settings = redshift_unload_settings self.type = 'AmazonRedshiftSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset.py new file mode 100644 index 000000000000..987151367421 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AmazonRedshiftTableDataset(Dataset): + """The Amazon Redshift table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
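The same parameter composes with the existing UNLOAD path on the Redshift source. A sketch assuming the existing RedshiftUnloadSettings model and its s3_linked_service_name and bucket_name parameters; all reference names are placeholders:

from azure.mgmt.datafactory.models import (
    AmazonRedshiftSource,
    LinkedServiceReference,
    RedshiftUnloadSettings,
)

# Redshift source that stages large results through S3 via UNLOAD.
source = AmazonRedshiftSource(
    query="SELECT * FROM public.sales",
    max_concurrent_connections=4,  # new in 0.8.0
    redshift_unload_settings=RedshiftUnloadSettings(
        s3_linked_service_name=LinkedServiceReference(reference_name="StagingS3"),
        bucket_name="unload-staging",
    ),
)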
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The Amazon Redshift table name. Type: string (or Expression + with resultType string). + :type table: object + :param amazon_redshift_table_dataset_schema: The Amazon Redshift schema + name. Type: string (or Expression with resultType string). + :type amazon_redshift_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'amazon_redshift_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonRedshiftTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.amazon_redshift_table_dataset_schema = kwargs.get('amazon_redshift_table_dataset_schema', None) + self.type = 'AmazonRedshiftTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset_py3.py new file mode 100644 index 000000000000..ceceaaba43e4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AmazonRedshiftTableDataset(Dataset): + """The Amazon Redshift table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The Amazon Redshift table name. Type: string (or Expression + with resultType string). + :type table: object + :param amazon_redshift_table_dataset_schema: The Amazon Redshift schema + name. Type: string (or Expression with resultType string). + :type amazon_redshift_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'amazon_redshift_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, amazon_redshift_table_dataset_schema=None, **kwargs) -> None: + super(AmazonRedshiftTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.amazon_redshift_table_dataset_schema = amazon_redshift_table_dataset_schema + self.type = 'AmazonRedshiftTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py index d6262a013b0d..e91a5ba26131 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py @@ -55,6 +55,12 @@ class AmazonS3Dataset(Dataset): :param version: The version for the S3 object. Type: string (or Expression with resultType string). :type version: object + :param modified_datetime_start: The start of S3 object's modified + datetime. Type: string (or Expression with resultType string). 
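Since tableName is being retired on the new AmazonRedshiftTableDataset, the separate schema and table pair is the preferred shape. A minimal sketch; the linked service name is a placeholder:

from azure.mgmt.datafactory.models import (
    AmazonRedshiftTableDataset,
    LinkedServiceReference,
)

dataset = AmazonRedshiftTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="RedshiftLS"),
    amazon_redshift_table_dataset_schema="public",  # serialized as typeProperties.schema
    table="sales",  # serialized as typeProperties.table
)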
+ :type modified_datetime_start: object + :param modified_datetime_end: The end of S3 object's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object :param format: The format of files. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the Amazon S3 @@ -82,6 +88,8 @@ class AmazonS3Dataset(Dataset): 'key': {'key': 'typeProperties.key', 'type': 'object'}, 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } @@ -92,6 +100,8 @@ def __init__(self, **kwargs): self.key = kwargs.get('key', None) self.prefix = kwargs.get('prefix', None) self.version = kwargs.get('version', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.format = kwargs.get('format', None) self.compression = kwargs.get('compression', None) self.type = 'AmazonS3Object' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py index 3936e9646a09..d84ae48b2a46 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py @@ -55,6 +55,12 @@ class AmazonS3Dataset(Dataset): :param version: The version for the S3 object. Type: string (or Expression with resultType string). :type version: object + :param modified_datetime_start: The start of S3 object's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of S3 object's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object :param format: The format of files. 
:type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the Amazon S3 @@ -82,16 +88,20 @@ class AmazonS3Dataset(Dataset): 'key': {'key': 'typeProperties.key', 'type': 'object'}, 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, *, linked_service_name, bucket_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, format=None, compression=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, bucket_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.bucket_name = bucket_name self.key = key self.prefix = prefix self.version = version + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end self.format = format self.compression = compression self.type = 'AmazonS3Object' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py index c9ff7261d915..250518c1a7ec 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py @@ -29,7 +29,7 @@ class AmazonS3LinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -40,6 +40,11 @@ class AmazonS3LinkedService(LinkedService): :param secret_access_key: The secret access key of the Amazon S3 Identity and Access Management (IAM) user. :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + S3 Connector. This is an optional property; change it only if you want to + try a different service endpoint or want to switch between https and http. + Type: string (or Expression with resultType string). + :type service_url: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
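The new serviceUrl and modified-datetime properties combine naturally: point the linked service at a regional endpoint, then scope the dataset to a modification window. A sketch assuming the SecureString secret type from the same models package; credentials and names are placeholders:

from azure.mgmt.datafactory.models import (
    AmazonS3Dataset,
    AmazonS3LinkedService,
    LinkedServiceReference,
    SecureString,
)

# Linked service pinned to a regional endpoint (new serviceUrl property).
s3_ls = AmazonS3LinkedService(
    access_key_id="AKIA...",  # placeholder
    secret_access_key=SecureString(value="<secret>"),  # placeholder
    service_url="https://s3.eu-west-1.amazonaws.com",
)

# Dataset scoped to objects modified inside a window (new in 0.8.0).
s3_ds = AmazonS3Dataset(
    linked_service_name=LinkedServiceReference(reference_name="S3LS"),
    bucket_name="my-bucket",
    key="raw/2019/08/",
    modified_datetime_start="2019-08-01T00:00:00Z",
    modified_datetime_end="2019-08-30T00:00:00Z",
)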
@@ -59,6 +64,7 @@ class AmazonS3LinkedService(LinkedService): 'type': {'key': 'type', 'type': 'str'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -66,5 +72,6 @@ def __init__(self, **kwargs): super(AmazonS3LinkedService, self).__init__(**kwargs) self.access_key_id = kwargs.get('access_key_id', None) self.secret_access_key = kwargs.get('secret_access_key', None) + self.service_url = kwargs.get('service_url', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AmazonS3' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py index 044e8bc299cf..8d136bb71fc0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py @@ -29,7 +29,7 @@ class AmazonS3LinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -40,6 +40,11 @@ class AmazonS3LinkedService(LinkedService): :param secret_access_key: The secret access key of the Amazon S3 Identity and Access Management (IAM) user. :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + S3 Connector. This is an optional property; change it only if you want to + try a different service endpoint or want to switch between https and http. + Type: string (or Expression with resultType string). + :type service_url: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -59,12 +64,14 @@ class AmazonS3LinkedService(LinkedService): 'type': {'key': 'type', 'type': 'str'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, encrypted_credential=None, **kwargs) -> None: super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.access_key_id = access_key_id self.secret_access_key = secret_access_key + self.service_url = service_url self.encrypted_credential = encrypted_credential self.type = 'AmazonS3' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py new file mode 100644 index 000000000000..74c77a16f0f2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py @@ -0,0 +1,55 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AmazonS3Location(DatasetLocation): + """The location of amazon S3 dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param bucket_name: Specify the bucketName of amazon S3. Type: string (or + Expression with resultType string) + :type bucket_name: object + :param version: Specify the version of amazon S3. Type: string (or + Expression with resultType string). 
+ :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonS3Location, self).__init__(**kwargs) + self.bucket_name = kwargs.get('bucket_name', None) + self.version = kwargs.get('version', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py new file mode 100644 index 000000000000..36afce341ada --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py @@ -0,0 +1,55 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AmazonS3Location(DatasetLocation): + """The location of amazon S3 dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param bucket_name: Specify the bucketName of amazon S3. Type: string (or + Expression with resultType string) + :type bucket_name: object + :param version: Specify the version of amazon S3. Type: string (or + Expression with resultType string). 
+ :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None, **kwargs) -> None: + super(AmazonS3Location, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.bucket_name = bucket_name + self.version = version diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py new file mode 100644 index 000000000000..e83910136070 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings import StoreReadSettings + + +class AmazonS3ReadSettings(StoreReadSettings): + """Amazon S3 read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the S3 object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string).
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonS3ReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.prefix = kwargs.get('prefix', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py new file mode 100644 index 000000000000..79645a869ac8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class AmazonS3ReadSettings(StoreReadSettings): + """Amazon S3 read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the S3 object name. Type: string (or + Expression with resultType string).
+ :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py new file mode 100644 index 000000000000..d206ac99ab85 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AvroDataset(Dataset): + """Avro dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
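Unlike most polymorphic models in this SDK, the read-settings constructors take type as a required argument rather than filling it server-side, so it must be passed explicitly. A minimal sketch of the new S3 read settings:

from azure.mgmt.datafactory.models import AmazonS3ReadSettings

read_settings = AmazonS3ReadSettings(
    type="AmazonS3ReadSettings",  # required: read-setting type discriminator
    recursive=True,
    wildcard_file_name="*.csv",
    prefix="raw/2019/",  # S3 object-name prefix filter
    modified_datetime_start="2019-08-01T00:00:00Z",
)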
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the avro storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param avro_compression_codec: Possible values include: 'none', 'deflate', + 'snappy', 'xz', 'bzip2' + :type avro_compression_codec: str or + ~azure.mgmt.datafactory.models.AvroCompressionCodec + :param avro_compression_level: + :type avro_compression_level: int + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + 'avro_compression_level': {'maximum': 9, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(AvroDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.avro_compression_codec = kwargs.get('avro_compression_codec', None) + self.avro_compression_level = kwargs.get('avro_compression_level', None) + self.type = 'Avro' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py new file mode 100644 index 000000000000..f0f44dbbd786 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AvroDataset(Dataset): + """Avro dataset. + + All required parameters must be populated in order to send to Azure. 
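Because location is a required parameter, the new AvroDataset is built around one of the DatasetLocation subtypes introduced above, and avroCompressionLevel is validated to the 1..9 range. A minimal sketch; names are placeholders:

from azure.mgmt.datafactory.models import (
    AmazonS3Location,
    AvroDataset,
    LinkedServiceReference,
)

avro_ds = AvroDataset(
    linked_service_name=LinkedServiceReference(reference_name="S3LS"),
    location=AmazonS3Location(
        type="AmazonS3Location",  # required discriminator in this release
        bucket_name="my-bucket",
        folder_path="curated/avro",
    ),
    avro_compression_codec="deflate",
    avro_compression_level=5,  # must fall in 1..9
)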
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the avro storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param avro_compression_codec: Possible values include: 'none', 'deflate', + 'snappy', 'xz', 'bzip2' + :type avro_compression_codec: str or + ~azure.mgmt.datafactory.models.AvroCompressionCodec + :param avro_compression_level: + :type avro_compression_level: int + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + 'avro_compression_level': {'maximum': 9, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, avro_compression_codec=None, avro_compression_level: int=None, **kwargs) -> None: + super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.avro_compression_codec = avro_compression_codec + self.avro_compression_level = avro_compression_level + self.type = 'Avro' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py new file mode 100644 index 
000000000000..34d4ceb1e0f6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AvroSink(CopySink): + """A copy activity Avro sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Avro store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Avro format settings. 
+ :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, + } + + def __init__(self, **kwargs): + super(AvroSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'AvroSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py new file mode 100644 index 000000000000..16363092dff2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AvroSink(CopySink): + """A copy activity Avro sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Avro store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Avro format settings. 
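A hedged sketch of pairing the AvroSink above with the AvroWriteSettings model introduced later in this diff; the discriminator string and the record name/namespace are assumptions, not values from this changeset.

from azure.mgmt.datafactory.models import AvroSink, AvroWriteSettings

fmt = AvroWriteSettings(
    type='AvroWriteSettings',             # write-settings discriminator; assumed value
    record_name='Event',                  # top-level Avro record name
    record_namespace='com.example.events',
)

sink = AvroSink(
    write_batch_size=10000,               # a plain int or an ADF expression object
    max_concurrent_connections=4,
    format_settings=fmt,
)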
+ :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'AvroSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py new file mode 100644 index 000000000000..3ea2e7a2a76f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AvroSource(CopySource): + """A copy activity Avro source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Avro store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, **kwargs): + super(AvroSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'AvroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py new file mode 100644 index 000000000000..74b5e6db0fe2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AvroSource(CopySource): + """A copy activity Avro source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Avro store settings. 
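For context, a minimal sketch wiring the AvroSource above together with the AvroSink from earlier in this diff into a copy activity; the activity and dataset reference names are hypothetical.

from azure.mgmt.datafactory.models import (
    AvroSink,
    AvroSource,
    CopyActivity,
    DatasetReference,
)

copy = CopyActivity(
    name='CopyAvro',
    inputs=[DatasetReference(reference_name='ds_avro_in')],    # hypothetical
    outputs=[DatasetReference(reference_name='ds_avro_out')],  # hypothetical
    source=AvroSource(source_retry_count=3, source_retry_wait='00:00:30'),
    sink=AvroSink(max_concurrent_connections=4),
)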
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'AvroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py new file mode 100644 index 000000000000..ec068ee29885 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_settings import FormatWriteSettings + + +class AvroWriteSettings(FormatWriteSettings): + """Avro write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param record_name: Top level record name in write result, which is + required in AVRO spec. + :type record_name: str + :param record_namespace: Record namespace in the write result. 
+ :type record_namespace: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'record_name': {'key': 'recordName', 'type': 'str'}, + 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AvroWriteSettings, self).__init__(**kwargs) + self.record_name = kwargs.get('record_name', None) + self.record_namespace = kwargs.get('record_namespace', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py new file mode 100644 index 000000000000..d14ebc4d1d29 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_settings_py3 import FormatWriteSettings + + +class AvroWriteSettings(FormatWriteSettings): + """Avro write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param record_name: Top level record name in write result, which is + required in AVRO spec. + :type record_name: str + :param record_namespace: Record namespace in the write result. + :type record_namespace: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'record_name': {'key': 'recordName', 'type': 'str'}, + 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, record_name: str=None, record_namespace: str=None, **kwargs) -> None: + super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.record_name = record_name + self.record_namespace = record_namespace diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py index 2fcf33e8d0c8..986023308e23 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py @@ -29,7 +29,7 @@ class AzureBatchLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py index 63724f76f13f..e7d33dfb342a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureBatchLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py index c3f4ffc118ba..01814cf8f9a9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py @@ -52,6 +52,12 @@ class AzureBlobDataset(Dataset): :param file_name: The name of the Azure Blob. Type: string (or Expression with resultType string). :type file_name: object + :param modified_datetime_start: The start of Azure Blob's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of Azure Blob's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object :param format: The format of the Azure Blob storage. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the blob storage. 
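The hunks above correct the annotations docstring on AzureBatchLinkedService from "Dataset" to "linked service". As a rough illustration of what such annotations carry in practice, assuming the 0.8.0 parameter set for this model (account, pool, and key values are placeholders):

from azure.mgmt.datafactory.models import (
    AzureBatchLinkedService,
    LinkedServiceReference,
    SecureString,
)

batch_ls = AzureBatchLinkedService(
    account_name='mybatchaccount',                                   # placeholder
    batch_uri='https://mybatchaccount.westus2.batch.azure.com',      # placeholder
    pool_name='adf-pool',
    linked_service_name=LinkedServiceReference(reference_name='ls_storage'),
    access_key=SecureString(value='<batch-access-key>'),
    annotations=['owned-by:data-eng', 'env:prod'],  # free-form descriptive tags
)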
@@ -76,6 +82,8 @@ class AzureBlobDataset(Dataset): 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } @@ -85,6 +93,8 @@ def __init__(self, **kwargs): self.folder_path = kwargs.get('folder_path', None) self.table_root_location = kwargs.get('table_root_location', None) self.file_name = kwargs.get('file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.format = kwargs.get('format', None) self.compression = kwargs.get('compression', None) self.type = 'AzureBlob' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py index 7567e1fba9fb..706c39deb289 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py @@ -52,6 +52,12 @@ class AzureBlobDataset(Dataset): :param file_name: The name of the Azure Blob. Type: string (or Expression with resultType string). :type file_name: object + :param modified_datetime_start: The start of Azure Blob's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of Azure Blob's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object :param format: The format of the Azure Blob storage. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the blob storage. 
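The hunks above (and the matching keyword-only signature change that follows) add a modified-datetime window to AzureBlobDataset. A minimal sketch, with hypothetical names and paths:

from azure.mgmt.datafactory.models import AzureBlobDataset, LinkedServiceReference

blob_ds = AzureBlobDataset(
    linked_service_name=LinkedServiceReference(reference_name='ls_blob'),
    folder_path='landing/2019/08',
    # New in 0.8.0: only pick up blobs last modified inside this window.
    modified_datetime_start='2019-08-01T00:00:00Z',
    modified_datetime_end='2019-09-01T00:00:00Z',
)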
@@ -76,15 +82,19 @@ class AzureBlobDataset(Dataset): 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, table_root_location=None, file_name=None, format=None, compression=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, table_root_location=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.folder_path = folder_path self.table_root_location = table_root_location self.file_name = file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end self.format = format self.compression = compression self.type = 'AzureBlob' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py new file mode 100644 index 000000000000..0ef62ff7122f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureBlobFSDataset(Dataset): + """The Azure Data Lake Storage Gen2 storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. + Type: string (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the Azure Data Lake Storage Gen2. Type: + string (or Expression with resultType string). + :type file_name: object + :param format: The format of the Azure Data Lake Storage Gen2 storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSDataset, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + self.type = 'AzureBlobFSFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py new file mode 100644 index 000000000000..82136a683fd3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureBlobFSDataset(Dataset): + """The Azure Data Lake Storage Gen2 storage. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. + Type: string (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the Azure Data Lake Storage Gen2. Type: + string (or Expression with resultType string). + :type file_name: object + :param format: The format of the Azure Data Lake Storage Gen2 storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: + super(AzureBlobFSDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.folder_path = folder_path + self.file_name = file_name + self.format = format + self.compression = compression + self.type = 'AzureBlobFSFile' diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py new file mode 100644 index 000000000000..262ce976227b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureBlobFSLinkedService(LinkedService): + """Azure Data Lake Storage Gen2 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). + :type url: object + :param account_key: Account key for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). + :type account_key: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. Type: + string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
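A sketch of the AzureBlobFSDataset model defined above (the linked-service name and paths are illustrative):

from azure.mgmt.datafactory.models import (
    AzureBlobFSDataset,
    LinkedServiceReference,
)

adls2_ds = AzureBlobFSDataset(
    linked_service_name=LinkedServiceReference(reference_name='ls_adls2'),  # hypothetical
    folder_path='myfilesystem/curated',
    file_name='part-000.avro',
)
# Per the _attribute_map above, this serializes with the constant type
# 'AzureBlobFSFile' and nests folderPath/fileName under typeProperties.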
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.account_key = kwargs.get('account_key', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureBlobFS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py new file mode 100644 index 000000000000..f0d555078bf7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureBlobFSLinkedService(LinkedService): + """Azure Data Lake Storage Gen2 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). + :type url: object + :param account_key: Account key for the Azure Data Lake Storage Gen2 + service. 
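Using the AzureBlobFSLinkedService above with service-principal authentication might look like this sketch; the endpoint, credentials, and the client call in the trailing comment are assumptions, not part of this diff.

from azure.mgmt.datafactory.models import AzureBlobFSLinkedService, SecureString

adls2_ls = AzureBlobFSLinkedService(
    url='https://myaccount.dfs.core.windows.net',     # hypothetical account endpoint
    service_principal_id='<app-id>',
    service_principal_key=SecureString(value='<app-secret>'),
    tenant='<tenant-id>',
)
# With a DataFactoryManagementClient, registration would be something like:
#   client.linked_services.create_or_update(
#       'my-rg', 'my-factory', 'ls_adls2', adls2_ls)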
Type: string (or Expression with resultType string). + :type account_key: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. Type: + string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, account_key=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.account_key = account_key + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureBlobFS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py new file mode 100644 index 000000000000..c21525bbac4c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AzureBlobFSLocation(DatasetLocation): + """The location of azure blobFS dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param file_system: Specify the fileSystem of azure blobFS. Type: string + (or Expression with resultType string). + :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'file_system': {'key': 'fileSystem', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSLocation, self).__init__(**kwargs) + self.file_system = kwargs.get('file_system', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py new file mode 100644 index 000000000000..afbae52fdeb0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AzureBlobFSLocation(DatasetLocation): + """The location of azure blobFS dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param file_system: Specify the fileSystem of azure blobFS. Type: string + (or Expression with resultType string). 
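Combining the AzureBlobFSLocation above with the Avro dataset from earlier in this diff, as a hedged sketch (the discriminator string and all names are assumptions):

from azure.mgmt.datafactory.models import (
    AvroDataset,
    AzureBlobFSLocation,
    LinkedServiceReference,
)

loc = AzureBlobFSLocation(
    type='AzureBlobFSLocation',  # location discriminator; assumed value
    file_system='myfilesystem',
    folder_path='raw/clicks',
    file_name='clicks.avro',
)

ds = AvroDataset(
    linked_service_name=LinkedServiceReference(reference_name='ls_adls2'),
    location=loc,
)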
+ :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'file_system': {'key': 'fileSystem', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, file_system=None, **kwargs) -> None: + super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.file_system = file_system diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py new file mode 100644 index 000000000000..6d80ce72ea57 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings import StoreReadSettings + + +class AzureBlobFSReadSettings(StoreReadSettings): + """Azure blobFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py new file mode 100644 index 000000000000..af4746e84f8e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class AzureBlobFSReadSettings(StoreReadSettings): + """Azure blobFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. 
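A sketch of the AzureBlobFSReadSettings above, combining wildcard matching with the modified-datetime window; the discriminator string and paths are assumptions. Objects like this feed the store_settings parameter of sources such as AvroSource earlier in this diff.

from azure.mgmt.datafactory.models import AzureBlobFSReadSettings

read_settings = AzureBlobFSReadSettings(
    type='AzureBlobFSReadSettings',  # read-settings discriminator; assumed value
    recursive=True,
    wildcard_folder_path='raw/2019/*',
    wildcard_file_name='*.avro',
    modified_datetime_start='2019-08-01T00:00:00Z',
    modified_datetime_end='2019-09-01T00:00:00Z',
)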
+ Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py new file mode 100644 index 000000000000..a47b173c6581 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureBlobFSSink(CopySink): + """A copy activity Azure Data Lake Storage Gen2 sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSSink, self).__init__(**kwargs) + self.copy_behavior = kwargs.get('copy_behavior', None) + self.type = 'AzureBlobFSSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py new file mode 100644 index 000000000000..e2b28bf30a8c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureBlobFSSink(CopySink): + """A copy activity Azure Data Lake Storage Gen2 sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
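A minimal sketch of the AzureBlobFSSink above; 'PreserveHierarchy' is an assumed service-side copy-behavior value, not one stated in this diff.

from azure.mgmt.datafactory.models import AzureBlobFSSink

sink = AzureBlobFSSink(
    copy_behavior='PreserveHierarchy',  # assumed copy-behavior value
    max_concurrent_connections=8,
    write_batch_timeout='00:05:00',     # matches the timespan pattern above
)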
+ :type type: str + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.copy_behavior = copy_behavior + self.type = 'AzureBlobFSSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py new file mode 100644 index 000000000000..0252ffd5ba8f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureBlobFSSource(CopySource): + """A copy activity Azure BlobFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. 
Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSSource, self).__init__(**kwargs) + self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) + self.skip_header_line_count = kwargs.get('skip_header_line_count', None) + self.recursive = kwargs.get('recursive', None) + self.type = 'AzureBlobFSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py new file mode 100644 index 000000000000..5b512c1f334f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureBlobFSSource(CopySource): + """A copy activity Azure BlobFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
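A matching sketch for AzureBlobFSSource, using only keyword arguments visible in the constructor above; the literal values are placeholders:

    from azure.mgmt.datafactory.models import AzureBlobFSSource

    # Read the folder tree recursively and drop one header line per blob.
    source = AzureBlobFSSource(
        recursive=True,
        skip_header_line_count=1,
        treat_empty_as_null=True,
    )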
+ :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: + super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.treat_empty_as_null = treat_empty_as_null + self.skip_header_line_count = skip_header_line_count + self.recursive = recursive + self.type = 'AzureBlobFSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py new file mode 100644 index 000000000000..f91971b829f7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings import StoreWriteSettings + + +class AzureBlobFSWriteSettings(StoreWriteSettings): + """Azure blobFS write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + :param block_size_in_mb: Indicates the block size(MB) when writing data to + blob. Type: integer (or Expression with resultType integer). 
+ :type block_size_in_mb: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSWriteSettings, self).__init__(**kwargs) + self.block_size_in_mb = kwargs.get('block_size_in_mb', None) + self.type = 'AzureBlobFSWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py new file mode 100644 index 000000000000..351eae467183 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings_py3 import StoreWriteSettings + + +class AzureBlobFSWriteSettings(StoreWriteSettings): + """Azure blobFS write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + :param block_size_in_mb: Indicates the block size(MB) when writing data to + blob. Type: integer (or Expression with resultType integer). 
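A short sketch of the write settings above; block_size_in_mb accepts a plain integer or an ADF expression object, and the numbers here are placeholders:

    from azure.mgmt.datafactory.models import AzureBlobFSWriteSettings

    # Write blobs in 8 MB blocks with at most four parallel connections.
    write_settings = AzureBlobFSWriteSettings(
        block_size_in_mb=8,
        max_concurrent_connections=4,
    )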
+ :type block_size_in_mb: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None: + super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.block_size_in_mb = block_size_in_mb + self.type = 'AzureBlobFSWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py index e4466c4ce9c9..5246e02ab9b4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py @@ -29,7 +29,7 @@ class AzureBlobStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py index 4587e0c95dad..ba0a511532b4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureBlobStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py new file mode 100644 index 000000000000..1efbbeaec352 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or + Expression with resultType string). + :type container: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageLocation, self).__init__(**kwargs) + self.container = kwargs.get('container', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py new file mode 100644 index 000000000000..63b122573039 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or + Expression with resultType string). 
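A sketch of constructing the blob location above; note that the base DatasetLocation leaves type as a required argument here, and the container and path names are placeholders:

    from azure.mgmt.datafactory.models import AzureBlobStorageLocation

    # Points a dataset at one file inside a blob container.
    location = AzureBlobStorageLocation(
        type='AzureBlobStorageLocation',
        container='raw-data',
        folder_path='events/2019',
        file_name='day01.csv',
    )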
+ :type container: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, container=None, **kwargs) -> None: + super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.container = container diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py new file mode 100644 index 000000000000..42b11cc6de16 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings import StoreReadSettings + + +class AzureBlobStorageReadSettings(StoreReadSettings): + """Azure blob read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py new file mode 100644 index 000000000000..495ea16afd98 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class AzureBlobStorageReadSettings(StoreReadSettings): + """Azure blob read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. 
+ Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py new file mode 100644 index 000000000000..c2834839f28a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings import StoreWriteSettings + + +class AzureBlobStorageWriteSettings(StoreWriteSettings): + """Azure blob write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + :param block_size_in_mb: Indicates the block size(MB) when writing data to + blob. Type: integer (or Expression with resultType integer). 
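A sketch of the read settings above, combining the wildcard and modified-datetime filters; type is required by the StoreReadSettings base and the window values are placeholders:

    from azure.mgmt.datafactory.models import AzureBlobStorageReadSettings

    # Pick up only *.csv files modified during August 2019.
    read_settings = AzureBlobStorageReadSettings(
        type='AzureBlobStorageReadSettings',
        recursive=True,
        wildcard_file_name='*.csv',
        modified_datetime_start='2019-08-01T00:00:00Z',
        modified_datetime_end='2019-08-31T23:59:59Z',
    )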
+ :type block_size_in_mb: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageWriteSettings, self).__init__(**kwargs) + self.block_size_in_mb = kwargs.get('block_size_in_mb', None) + self.type = 'AzureBlobStorageWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py new file mode 100644 index 000000000000..a37c83039a8c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings_py3 import StoreWriteSettings + + +class AzureBlobStorageWriteSettings(StoreWriteSettings): + """Azure blob write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + :param block_size_in_mb: Indicates the block size(MB) when writing data to + blob. Type: integer (or Expression with resultType integer). 
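The blob write settings mirror the ADLS Gen2 variant above; a one-line sketch with placeholder numbers:

    from azure.mgmt.datafactory.models import AzureBlobStorageWriteSettings

    # Cap parallel connections and write in 16 MB blocks.
    write_settings = AzureBlobStorageWriteSettings(
        max_concurrent_connections=8,
        block_size_in_mb=16,
    )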
+ :type block_size_in_mb: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None: + super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.block_size_in_mb = block_size_in_mb + self.type = 'AzureBlobStorageWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py new file mode 100644 index 000000000000..308d445d1726 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class AzureDataExplorerCommandActivity(ExecutionActivity): + """Azure Data Explorer command activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param command: Required. A control command, according to the Azure Data + Explorer command syntax. Type: string (or Expression with resultType + string). + :type command: object + :param command_timeout: Control command timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type command_timeout: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'command': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerCommandActivity, self).__init__(**kwargs) + self.command = kwargs.get('command', None) + self.command_timeout = kwargs.get('command_timeout', None) + self.type = 'AzureDataExplorerCommand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py new file mode 100644 index 000000000000..2f04dfddf08f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class AzureDataExplorerCommandActivity(ExecutionActivity): + """Azure Data Explorer command activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param command: Required. A control command, according to the Azure Data + Explorer command syntax. Type: string (or Expression with resultType + string). + :type command: object + :param command_timeout: Control command timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
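A hedged sketch of the command activity above; the control command and the linked service name are placeholders, the LinkedServiceReference constructor is assumed from the models package, and the timeout follows the d.hh:mm:ss pattern in the docstring:

    from azure.mgmt.datafactory.models import (
        AzureDataExplorerCommandActivity,
        LinkedServiceReference,
    )

    # Run a Kusto control command with a one-hour timeout.
    activity = AzureDataExplorerCommandActivity(
        name='ShowTables',
        command='.show tables',  # any valid Kusto control command
        command_timeout='0.01:00:00',
        linked_service_name=LinkedServiceReference(
            reference_name='AzureDataExplorerLS',  # hypothetical linked service
        ),
    )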
+ :type command_timeout: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'command': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + } + + def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, command_timeout=None, **kwargs) -> None: + super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.command = command + self.command_timeout = command_timeout + self.type = 'AzureDataExplorerCommand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py new file mode 100644 index 000000000000..5e5a9f7560c6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureDataExplorerLinkedService(LinkedService): + """Azure Data Explorer (Kusto) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of Azure Data Explorer (the + engine's endpoint). URL will be in the format + https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or + Expression with resultType string) + :type endpoint: object + :param service_principal_id: Required.
The ID of the service principal + used to authenticate against Azure Data Explorer. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The key of the service principal + used to authenticate against Kusto. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param tenant: Required. The name or ID of the tenant to which the service + principal belongs. Type: string (or Expression with resultType string). + :type tenant: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'database': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.database = kwargs.get('database', None) + self.tenant = kwargs.get('tenant', None) + self.type = 'AzureDataExplorer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py new file mode 100644 index 000000000000..3cd8ab9c3c19 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureDataExplorerLinkedService(LinkedService): + """Azure Data Explorer (Kusto) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
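A sketch of the linked service above; all five type properties are required, the endpoint, tenant, and database values are placeholders, and SecureString (a SecretBase implementation elsewhere in this package) is assumed for wrapping the key:

    from azure.mgmt.datafactory.models import (
        AzureDataExplorerLinkedService,
        SecureString,
    )

    # Service-principal connection to a Kusto cluster.
    kusto_ls = AzureDataExplorerLinkedService(
        endpoint='https://mycluster.westeurope.kusto.windows.net',  # placeholder
        service_principal_id='00000000-0000-0000-0000-000000000000',
        service_principal_key=SecureString(value='<secret placeholder>'),
        database='Telemetry',
        tenant='contoso.onmicrosoft.com',
    )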
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of Azure Data Explorer (the + engine's endpoint). URL will be in the format + https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or + Expression with resultType string) + :type endpoint: object + :param service_principal_id: Required. The ID of the service principal + used to authenticate against Azure Data Explorer. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The key of the service principal + used to authenticate against Kusto. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param tenant: Required. The name or ID of the tenant to which the service + principal belongs. Type: string (or Expression with resultType string). + :type tenant: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'database': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + } + + def __init__(self, *, endpoint, service_principal_id, service_principal_key, database, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.database = database + self.tenant = tenant + self.type = 'AzureDataExplorer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py new file mode 100644 index 000000000000..5c204ab769e4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureDataExplorerSink(CopySink): + """A copy activity Azure Data Explorer sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param ingestion_mapping_name: A name of a pre-created csv mapping that + was defined on the target Kusto table. Type: string. + :type ingestion_mapping_name: object + :param ingestion_mapping_as_json: An explicit column mapping description + provided in a json format. Type: string. + :type ingestion_mapping_as_json: object + :param flush_immediately: If set to true, any aggregation will be skipped. + Default is false. Type: boolean. 
+ :type flush_immediately: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, + 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, + 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerSink, self).__init__(**kwargs) + self.ingestion_mapping_name = kwargs.get('ingestion_mapping_name', None) + self.ingestion_mapping_as_json = kwargs.get('ingestion_mapping_as_json', None) + self.flush_immediately = kwargs.get('flush_immediately', None) + self.type = 'AzureDataExplorerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py new file mode 100644 index 000000000000..e5cb67bc79b8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureDataExplorerSink(CopySink): + """A copy activity Azure Data Explorer sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param ingestion_mapping_name: A name of a pre-created csv mapping that + was defined on the target Kusto table. Type: string. 
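A sketch of the Kusto sink above; the mapping name is a placeholder for a CSV mapping pre-created on the target table:

    from azure.mgmt.datafactory.models import AzureDataExplorerSink

    # Ingest through a named CSV mapping and skip server-side aggregation.
    sink = AzureDataExplorerSink(
        ingestion_mapping_name='EventsCsvMapping',
        flush_immediately=True,
    )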
+ :type ingestion_mapping_name: object + :param ingestion_mapping_as_json: An explicit column mapping description + provided in a json format. Type: string. + :type ingestion_mapping_as_json: object + :param flush_immediately: If set to true, any aggregation will be skipped. + Default is false. Type: boolean. + :type flush_immediately: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, + 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, + 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None, **kwargs) -> None: + super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.ingestion_mapping_name = ingestion_mapping_name + self.ingestion_mapping_as_json = ingestion_mapping_as_json + self.flush_immediately = flush_immediately + self.type = 'AzureDataExplorerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py new file mode 100644 index 000000000000..2caaa517efd5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureDataExplorerSource(CopySource): + """A copy activity Azure Data Explorer (Kusto) source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Required. Database query. Should be a Kusto Query Language + (KQL) query. Type: string (or Expression with resultType string). + :type query: object + :param no_truncation: The name of the Boolean option that controls whether + truncation is applied to result-sets that go beyond a certain row-count + limit. + :type no_truncation: object + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. + :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + 'query': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.no_truncation = kwargs.get('no_truncation', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.type = 'AzureDataExplorerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py new file mode 100644 index 000000000000..55a6bc78ee04 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureDataExplorerSource(CopySource): + """A copy activity Azure Data Explorer (Kusto) source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Required. Database query. Should be a Kusto Query Language + (KQL) query. Type: string (or Expression with resultType string). + :type query: object + :param no_truncation: The name of the Boolean option that controls whether + truncation is applied to result-sets that go beyond a certain row-count + limit. + :type no_truncation: object + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. + :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + 'query': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__(self, *, query, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, no_truncation=None, query_timeout=None, **kwargs) -> None: + super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.no_truncation = no_truncation + self.query_timeout = query_timeout + self.type = 'AzureDataExplorerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py new file mode 100644 index 000000000000..594d22171f48 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureDataExplorerTableDataset(Dataset): + """The Azure Data Explorer (Kusto) dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
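AzureDataExplorerSource is the matching source model; query is its only required type property and must be a KQL statement. A short sketch under the same 0.8.0 assumption, with a hypothetical table name::

    from azure.mgmt.datafactory.models import AzureDataExplorerSource

    source = AzureDataExplorerSource(
        query='MyTable | where Timestamp > ago(1d)',  # required KQL query
        no_truncation=True,        # disable the default row-count truncation
        query_timeout='00:10:00',  # matches the documented timespan pattern
    )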
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table: The table name of the Azure Data Explorer database. Type: + string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerTableDataset, self).__init__(**kwargs) + self.table = kwargs.get('table', None) + self.type = 'AzureDataExplorerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py new file mode 100644 index 000000000000..d36b0f39c2fe --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureDataExplorerTableDataset(Dataset): + """The Azure Data Explorer (Kusto) dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table: The table name of the Azure Data Explorer database. Type: + string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table=None, **kwargs) -> None: + super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table = table + self.type = 'AzureDataExplorerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py index 73ec2b6f9de9..0381e1b1de65 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py @@ -29,7 +29,7 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
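A table in an Azure Data Explorer database is then addressed through this dataset model, which points at a linked service. A minimal sketch; LinkedServiceReference is assumed from the same models package, and both names are placeholders::

    from azure.mgmt.datafactory.models import (
        AzureDataExplorerTableDataset,
        LinkedServiceReference,
    )

    dataset = AzureDataExplorerTableDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='AdxLinkedService'),  # hypothetical linked service
        table='MyTable',                         # hypothetical Kusto table
    )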
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py index b6c4b993cae7..93250e2cef76 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py index e0299ba2bcad..de15057f78ed 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py @@ -43,8 +43,8 @@ class AzureDataLakeStoreDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param folder_path: Required. Path to the folder in the Azure Data Lake - Store. Type: string (or Expression with resultType string). + :param folder_path: Path to the folder in the Azure Data Lake Store. Type: + string (or Expression with resultType string). :type folder_path: object :param file_name: The name of the file in the Azure Data Lake Store. Type: string (or Expression with resultType string). @@ -59,7 +59,6 @@ class AzureDataLakeStoreDataset(Dataset): _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'folder_path': {'required': True}, } _attribute_map = { diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py index 62e761dc9695..d2df0ffebe7e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py @@ -43,8 +43,8 @@ class AzureDataLakeStoreDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param folder_path: Required. Path to the folder in the Azure Data Lake - Store. Type: string (or Expression with resultType string). + :param folder_path: Path to the folder in the Azure Data Lake Store. Type: + string (or Expression with resultType string). :type folder_path: object :param file_name: The name of the file in the Azure Data Lake Store. Type: string (or Expression with resultType string). 
@@ -59,7 +59,6 @@ class AzureDataLakeStoreDataset(Dataset): _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'folder_path': {'required': True}, } _attribute_map = { @@ -78,7 +77,7 @@ class AzureDataLakeStoreDataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, *, linked_service_name, folder_path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, file_name=None, format=None, compression=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: super(AzureDataLakeStoreDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.folder_path = folder_path self.file_name = file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py index 0c39866887ef..f08e086cb500 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py @@ -29,7 +29,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py index 10e3b72e654e..7b8ab293c0cf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py new file mode 100644 index 000000000000..a4bf521a2005 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
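The hunks above relax folder_path on AzureDataLakeStoreDataset from required to optional, so in the Python 3 constructor it moves behind the keyword-only optional parameters. Under the new signature a dataset can be declared without a path, for example::

    from azure.mgmt.datafactory.models import (
        AzureDataLakeStoreDataset,
        LinkedServiceReference,
    )

    # folder_path is no longer required at construction time.
    dataset = AzureDataLakeStoreDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='AdlsLinkedService'),  # placeholder name
    )
    dataset.folder_path = 'raw/events'  # can still be set explicitly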
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AzureDataLakeStoreLocation(DatasetLocation): + """The location of azure data lake store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py new file mode 100644 index 000000000000..e7955731fc31 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AzureDataLakeStoreLocation(DatasetLocation): + """The location of azure data lake store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py new file mode 100644 index 000000000000..213d69966baf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings import StoreReadSettings + + +class AzureDataLakeStoreReadSettings(StoreReadSettings): + """Azure data lake store read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: ADLS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
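Note that in this release the location's type discriminator is a plain required string rather than a server-filled constant, so the caller passes it explicitly. A construction sketch with placeholder paths::

    from azure.mgmt.datafactory.models import AzureDataLakeStoreLocation

    location = AzureDataLakeStoreLocation(
        type='AzureDataLakeStoreLocation',  # required storage-location type string
        folder_path='raw/events/2019',      # placeholder folder
        file_name='data.csv',               # placeholder file
    )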
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py new file mode 100644 index 000000000000..b4bccc5e78a3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class AzureDataLakeStoreReadSettings(StoreReadSettings): + """Azure data lake store read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: ADLS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. 
+ Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py index ceaabf438097..e882698c2ca6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py @@ -34,12 +34,16 @@ class AzureDataLakeStoreSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param enable_adls_single_file_parallel: Single File Parallel. 
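As with the location above, the read-settings type is a required string supplied by the caller. A sketch that combines the wildcard and modified-datetime filters, with placeholder values::

    from azure.mgmt.datafactory.models import AzureDataLakeStoreReadSettings

    read_settings = AzureDataLakeStoreReadSettings(
        type='AzureDataLakeStoreReadSettings',  # required read-setting type
        recursive=True,                         # read the folder tree recursively
        wildcard_file_name='*.csv',             # placeholder wildcard
        modified_datetime_start='2019-08-01T00:00:00Z',
        modified_datetime_end='2019-08-31T00:00:00Z',
    )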
+ :type enable_adls_single_file_parallel: object """ _validation = { @@ -52,11 +56,14 @@ class AzureDataLakeStoreSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } def __init__(self, **kwargs): super(AzureDataLakeStoreSink, self).__init__(**kwargs) self.copy_behavior = kwargs.get('copy_behavior', None) + self.enable_adls_single_file_parallel = kwargs.get('enable_adls_single_file_parallel', None) self.type = 'AzureDataLakeStoreSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py index 449c7b0a2a3e..0f96cea725e2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py @@ -34,12 +34,16 @@ class AzureDataLakeStoreSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param enable_adls_single_file_parallel: Single File Parallel. 
+ :type enable_adls_single_file_parallel: object """ _validation = { @@ -52,11 +56,14 @@ class AzureDataLakeStoreSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, copy_behavior=None, **kwargs) -> None: - super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, enable_adls_single_file_parallel=None, **kwargs) -> None: + super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.copy_behavior = copy_behavior + self.enable_adls_single_file_parallel = enable_adls_single_file_parallel self.type = 'AzureDataLakeStoreSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py index 60a6599c8fbb..9d2046049a30 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py @@ -27,6 +27,10 @@ class AzureDataLakeStoreSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
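These sink hunks loosen copy_behavior from the CopyBehaviorType enum to an untyped object and add the single-file-parallel switch, so the old enum values are now passed as plain strings. For example::

    from azure.mgmt.datafactory.models import AzureDataLakeStoreSink

    sink = AzureDataLakeStoreSink(
        copy_behavior='PreserveHierarchy',      # formerly a CopyBehaviorType value
        enable_adls_single_file_parallel=True,  # new in this release
        max_concurrent_connections=8,           # new in this release
    )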
:type type: str :param recursive: If true, files under the folder path will be read @@ -43,6 +47,7 @@ class AzureDataLakeStoreSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py index d228d787bff4..e1d883972220 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py @@ -27,6 +27,10 @@ class AzureDataLakeStoreSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read @@ -43,11 +47,12 @@ class AzureDataLakeStoreSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, recursive=None, **kwargs) -> None: - super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: + super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.type = 'AzureDataLakeStoreSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py new file mode 100644 index 000000000000..6cf8deeacb07 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
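A source configured this way plugs into a copy activity like any other. A rough end-to-end sketch; CopyActivity, BlobSink, and DatasetReference are assumed from the same models package, and all names are placeholders::

    from azure.mgmt.datafactory.models import (
        AzureDataLakeStoreSource,
        BlobSink,
        CopyActivity,
        DatasetReference,
    )

    source = AzureDataLakeStoreSource(recursive=True, max_concurrent_connections=4)

    # Wire the source into a copy activity with a hypothetical blob sink.
    copy = CopyActivity(
        name='CopyAdlsToBlob',
        source=source,
        sink=BlobSink(),
        inputs=[DatasetReference(reference_name='AdlsInputDataset')],
        outputs=[DatasetReference(reference_name='BlobOutputDataset')],
    )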
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings import StoreWriteSettings + + +class AzureDataLakeStoreWriteSettings(StoreWriteSettings): + """Azure data lake store write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs) + self.type = 'AzureDataLakeStoreWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py new file mode 100644 index 000000000000..0b9a0e38e41c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings_py3 import StoreWriteSettings + + +class AzureDataLakeStoreWriteSettings(StoreWriteSettings): + """Azure data lake store write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'AzureDataLakeStoreWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py index c036b299fff0..6cc4c12674cb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py @@ -29,7 +29,7 @@ class AzureDatabricksLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -64,6 +64,16 @@ class AzureDatabricksLinkedService(LinkedService): :type new_cluster_spark_env_vars: dict[str, object] :param new_cluster_custom_tags: Additional tags for cluster resources. :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_driver_node_type: The driver node type for the new + cluster. Type: string (or Expression with resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for + the new cluster. Type: array of strings (or Expression with resultType + array of strings). + :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new + cluster. Type: boolean (or Expression with resultType boolean). + :type new_cluster_enable_elastic_disk: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
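Unlike the read settings, the write-settings type is a constant filled in by the model itself, so only the optional knobs are passed. A minimal sketch::

    from azure.mgmt.datafactory.models import AzureDataLakeStoreWriteSettings

    write_settings = AzureDataLakeStoreWriteSettings(
        copy_behavior='FlattenHierarchy',  # untyped, so a plain string works
        max_concurrent_connections=4,
    )
    print(write_settings.type)  # 'AzureDataLakeStoreWriteSettings'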
@@ -92,6 +102,9 @@ class AzureDatabricksLinkedService(LinkedService): 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -106,5 +119,8 @@ def __init__(self, **kwargs): self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) + self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) + self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) + self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureDatabricks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py index 8060311a4e0d..6299dac1e3f2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureDatabricksLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -64,6 +64,16 @@ class AzureDatabricksLinkedService(LinkedService): :type new_cluster_spark_env_vars: dict[str, object] :param new_cluster_custom_tags: Additional tags for cluster resources. :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_driver_node_type: The driver node type for the new + cluster. Type: string (or Expression with resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for + the new cluster. Type: array of strings (or Expression with resultType + array of strings). + :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new + cluster. Type: boolean (or Expression with resultType boolean). + :type new_cluster_enable_elastic_disk: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -92,10 +102,13 @@ class AzureDatabricksLinkedService(LinkedService): 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, domain, access_token, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, existing_cluster_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, new_cluster_spark_env_vars=None, new_cluster_custom_tags=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, domain, access_token, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, existing_cluster_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, new_cluster_spark_env_vars=None, new_cluster_custom_tags=None, new_cluster_driver_node_type=None, new_cluster_init_scripts=None, new_cluster_enable_elastic_disk=None, encrypted_credential=None, **kwargs) -> None: super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.domain = domain self.access_token = access_token @@ -106,5 +119,8 @@ def __init__(self, *, domain, access_token, additional_properties=None, connect_ self.new_cluster_spark_conf = new_cluster_spark_conf self.new_cluster_spark_env_vars = new_cluster_spark_env_vars self.new_cluster_custom_tags = new_cluster_custom_tags + self.new_cluster_driver_node_type = new_cluster_driver_node_type + self.new_cluster_init_scripts = new_cluster_init_scripts + self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk self.encrypted_credential = encrypted_credential self.type = 'AzureDatabricks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py index 44917d8d23b9..2ed5b870a778 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py @@ -29,7 +29,7 @@ class AzureFunctionLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
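The three new cluster properties slot in next to the existing new-cluster settings used when a job cluster is provisioned per run. A sketch assuming azure-mgmt-datafactory 0.8.0; every value below is a placeholder, and SecureString is assumed from the same models package::

    from azure.mgmt.datafactory.models import (
        AzureDatabricksLinkedService,
        SecureString,
    )

    databricks = AzureDatabricksLinkedService(
        domain='https://westus.azuredatabricks.net',  # required
        access_token=SecureString(value='<token>'),   # required secret
        new_cluster_version='5.4.x-scala2.11',
        new_cluster_num_of_worker='2',
        new_cluster_node_type='Standard_DS3_v2',
        new_cluster_driver_node_type='Standard_DS4_v2',    # new in this release
        new_cluster_init_scripts=['dbfs:/init/setup.sh'],  # new in this release
        new_cluster_enable_elastic_disk=True,              # new in this release
    )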
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py index b6b0f9600da1..a1bfdbe8b6c1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureFunctionLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py index c7ad622591ee..768f0d83ae93 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py @@ -29,7 +29,7 @@ class AzureKeyVaultLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py index e13cf7fb527a..50f4a58a5a1b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureKeyVaultLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py new file mode 100644 index 000000000000..d2dc7db88851 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureMariaDBLinkedService(LinkedService): + """Azure Database for MariaDB linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMariaDBLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureMariaDB' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py new file mode 100644 index 000000000000..c80015ed6b45 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureMariaDBLinkedService(LinkedService): + """Azure Database for MariaDB linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'AzureMariaDB' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py new file mode 100644 index 000000000000..229e6f4311e3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
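# Illustrative sketch of the new AzureMariaDBLinkedService, with the password
# taken from Key Vault as the pwd docstring above describes. The
# AzureKeyVaultSecretReference and LinkedServiceReference shapes are assumed
# from elsewhere in this package; the connection string is a placeholder.
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    AzureMariaDBLinkedService,
    LinkedServiceReference,
)

maria_ls = AzureMariaDBLinkedService(
    connection_string='Server=myserver.mariadb.database.azure.com;'
                      'Port=3306;Database=mydb;Uid=admin@myserver',
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='AzureKeyVault1'),
        secret_name='mariadb-password',
    ),
)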
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureMariaDBSource(CopySource): + """A copy activity Azure MariaDB source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMariaDBSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'AzureMariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py new file mode 100644 index 000000000000..11358f899e51 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureMariaDBSource(CopySource): + """A copy activity Azure MariaDB source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'AzureMariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py new file mode 100644 index 000000000000..a06c722279f2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureMariaDBTableDataset(Dataset): + """Azure Database for MariaDB dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
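# Illustrative sketch of the new copy-activity source defined above; the query
# and the new connection cap are the interesting parts. Values are placeholders.
from azure.mgmt.datafactory.models import AzureMariaDBSource

maria_source = AzureMariaDBSource(
    query='SELECT id, amount FROM sales.orders',  # serialized as 'query'
    max_concurrent_connections=4,                 # serialized as maxConcurrentConnections
)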
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMariaDBTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'AzureMariaDBTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py new file mode 100644 index 000000000000..9c6fd648af20 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureMariaDBTableDataset(Dataset): + """Azure Database for MariaDB dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. 
Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'AzureMariaDBTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py index a6a19be4069b..08dfec98a6bf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py @@ -29,7 +29,7 @@ class AzureMLLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py index 0fff3cea9b8a..c77a692adc03 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureMLLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
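# Illustrative sketch of the new dataset model whose py3 constructor appears
# above. The linked service name is a placeholder reference.
from azure.mgmt.datafactory.models import AzureMariaDBTableDataset, LinkedServiceReference

maria_ds = AzureMariaDBTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='AzureMariaDB1'),
    table_name='sales.orders',   # serialized under typeProperties.tableName
)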
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py index 64a072f1f38b..aedbdbb73eb5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py @@ -29,7 +29,7 @@ class AzureMySqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py index dcf4861da573..57692275f564 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureMySqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink.py new file mode 100644 index 000000000000..b3ee0bbc8645 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureMySqlSink(CopySink): + """A copy activity Azure MySql sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMySqlSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'AzureMySqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink_py3.py new file mode 100644 index 000000000000..340c10f5988b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureMySqlSink(CopySink): + """A copy activity Azure MySql sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. 
Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'AzureMySqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py index 7409be73bd09..823336432567 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py @@ -27,6 +27,10 @@ class AzureMySqlSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType @@ -42,6 +46,7 @@ class AzureMySqlSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py index 4e1d35981f78..7030738d2615 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py @@ -27,6 +27,10 @@ class AzureMySqlSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
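# Illustrative sketch of the new AzureMySqlSink; pre_copy_script runs once
# before the copy starts, per the docstring above. Values are placeholders.
from azure.mgmt.datafactory.models import AzureMySqlSink

mysql_sink = AzureMySqlSink(
    pre_copy_script='TRUNCATE TABLE staging_orders',
    write_batch_size=1000,
    max_concurrent_connections=2,
)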
:type type: str :param query: Database query. Type: string (or Expression with resultType @@ -42,11 +46,12 @@ class AzureMySqlSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'AzureMySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py index 89c9b29cdcde..92359d6d6a10 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py @@ -29,7 +29,7 @@ class AzurePostgreSqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py index e885498530ed..47f8f17980f8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py @@ -29,7 +29,7 @@ class AzurePostgreSqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py new file mode 100644 index 000000000000..6214e1ba1f22 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
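# Illustrative sketch wiring the source above (with its new
# max_concurrent_connections) to the new sink inside a copy activity.
# CopyActivity and DatasetReference are assumed from elsewhere in this package.
from azure.mgmt.datafactory.models import (
    AzureMySqlSink,
    AzureMySqlSource,
    CopyActivity,
    DatasetReference,
)

copy_activity = CopyActivity(
    name='CopyMySqlStaging',
    inputs=[DatasetReference(reference_name='MySqlSourceDataset')],
    outputs=[DatasetReference(reference_name='MySqlSinkDataset')],
    source=AzureMySqlSource(query='SELECT * FROM orders', max_concurrent_connections=4),
    sink=AzureMySqlSink(pre_copy_script='TRUNCATE TABLE staging_orders'),
)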
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzurePostgreSqlSink(CopySink): + """A copy activity Azure PostgreSQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzurePostgreSqlSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'AzurePostgreSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py new file mode 100644 index 000000000000..b7cd0ec51a29 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
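# Illustrative sketch of the new AzurePostgreSqlSink (py2 module above, py3
# twin below); it mirrors AzureMySqlSink. On Python 2 these properties arrive
# via **kwargs, as the kwargs.get(...) constructor shows.
from azure.mgmt.datafactory.models import AzurePostgreSqlSink

pg_sink = AzurePostgreSqlSink(
    pre_copy_script='TRUNCATE TABLE public.orders_staging',
    max_concurrent_connections=2,
)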
+# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzurePostgreSqlSink(CopySink): + """A copy activity Azure PostgreSQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'AzurePostgreSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py index 816e066ecebb..e0cd62fd8028 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py @@ -27,6 +27,10 @@ class AzurePostgreSqlSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class AzurePostgreSqlSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py index 2af53cf91da2..0362b0dca390 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py @@ -27,6 +27,10 @@ class AzurePostgreSqlSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class AzurePostgreSqlSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'AzurePostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py index 8960acc0df75..933264b57a9b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py @@ -43,9 +43,17 @@ class AzurePostgreSqlTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. 
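# Illustrative sketch of the source-side counterpart; source_retry_wait
# follows the documented [d.]hh:mm:ss pattern, e.g. a 30-second wait.
from azure.mgmt.datafactory.models import AzurePostgreSqlSource

pg_source = AzurePostgreSqlSource(
    query='SELECT * FROM public.orders',
    source_retry_count=3,
    source_retry_wait='00:00:30',
    max_concurrent_connections=4,   # added in this change
)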
:type type: str - :param table_name: The table name. Type: string (or Expression with + :param table_name: The table name of the Azure PostgreSQL database which + includes both schema and table. Type: string (or Expression with resultType string). :type table_name: object + :param table: The table name of the Azure PostgreSQL database. Type: + string (or Expression with resultType string). + :type table: object + :param azure_postgre_sql_table_dataset_schema: The schema name of the + Azure PostgreSQL database. Type: string (or Expression with resultType + string). + :type azure_postgre_sql_table_dataset_schema: object """ _validation = { @@ -64,9 +72,13 @@ class AzurePostgreSqlTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'azure_postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.azure_postgre_sql_table_dataset_schema = kwargs.get('azure_postgre_sql_table_dataset_schema', None) self.type = 'AzurePostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py index fddf0720c565..485dc3efb102 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py @@ -43,9 +43,17 @@ class AzurePostgreSqlTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with + :param table_name: The table name of the Azure PostgreSQL database which + includes both schema and table. Type: string (or Expression with resultType string). :type table_name: object + :param table: The table name of the Azure PostgreSQL database. Type: + string (or Expression with resultType string). + :type table: object + :param azure_postgre_sql_table_dataset_schema: The schema name of the + Azure PostgreSQL database. Type: string (or Expression with resultType + string). 
+ :type azure_postgre_sql_table_dataset_schema: object """ _validation = { @@ -64,9 +72,13 @@ class AzurePostgreSqlTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'azure_postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, azure_postgre_sql_table_dataset_schema=None, **kwargs) -> None: super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.azure_postgre_sql_table_dataset_schema = azure_postgre_sql_table_dataset_schema self.type = 'AzurePostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py index 5ecb911fb94a..9f3a63db4978 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py @@ -34,6 +34,10 @@ class AzureQueueSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str """ @@ -48,6 +52,7 @@ class AzureQueueSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py index debc14c0c7e1..db2fb60ddb1e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py @@ -34,6 +34,10 @@ class AzureQueueSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
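# Illustrative sketch of the split schema/table properties added above; note
# from the attribute map that the long Python name
# azure_postgre_sql_table_dataset_schema serializes to plain typeProperties.schema.
from azure.mgmt.datafactory.models import AzurePostgreSqlTableDataset, LinkedServiceReference

pg_dataset = AzurePostgreSqlTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='AzurePostgreSql1'),
    azure_postgre_sql_table_dataset_schema='public',   # typeProperties.schema
    table='orders',                                    # typeProperties.table
)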
:type type: str """ @@ -48,9 +52,10 @@ class AzureQueueSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, **kwargs) -> None: - super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'AzureQueueSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py index c09cd94bfb51..9aae64af8da0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py @@ -34,6 +34,10 @@ class AzureSearchIndexSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specify the write behavior when upserting documents @@ -52,6 +56,7 @@ class AzureSearchIndexSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py index 9ed48b36a588..3cd887a2512c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py @@ -34,6 +34,10 @@ class AzureSearchIndexSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specify the write behavior when upserting documents @@ -52,11 +56,12 @@ class AzureSearchIndexSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, write_behavior=None, **kwargs) -> None: - super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.type = 'AzureSearchIndexSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py index 18979ed87ca0..782799cd5b28 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py @@ -29,7 +29,7 @@ class AzureSearchLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py index 6cc3cdc98b89..8589c3aead91 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureSearchLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
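# Illustrative sketch of the sink with the new connection cap; the
# write_behavior value 'Merge' is one of the documented upsert behaviors
# (assumed from the AzureSearchIndexWriteBehaviorType enum in this package).
from azure.mgmt.datafactory.models import AzureSearchIndexSink

search_sink = AzureSearchIndexSink(
    write_behavior='Merge',
    max_concurrent_connections=2,   # added in this change
)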
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py index 68ad549ed733..0da66637a04f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py @@ -29,7 +29,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py index afd58ae43354..dbcf6c88b134 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py index d4aa961cb424..cc7c9d58d19f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py @@ -29,7 +29,7 @@ class AzureSqlDWLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py index a78551dff273..5c75f3904b37 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureSqlDWLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py index e12464efdd49..ed9fe8904d73 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py @@ -43,15 +43,20 @@ class AzureSqlDWTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the Azure SQL Data - Warehouse. Type: string (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param azure_sql_dw_table_dataset_schema: The schema name of the Azure SQL + Data Warehouse. Type: string (or Expression with resultType string). + :type azure_sql_dw_table_dataset_schema: object + :param table: The table name of the Azure SQL Data Warehouse. Type: string + (or Expression with resultType string). + :type table: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -65,9 +70,13 @@ class AzureSqlDWTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_dw_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__(self, **kwargs): super(AzureSqlDWTableDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.azure_sql_dw_table_dataset_schema = kwargs.get('azure_sql_dw_table_dataset_schema', None) + self.table = kwargs.get('table', None) self.type = 'AzureSqlDWTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py index a06073f86c5b..a38e4ab479c9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py @@ -43,15 +43,20 @@ class AzureSqlDWTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the Azure SQL Data - Warehouse. Type: string (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param azure_sql_dw_table_dataset_schema: The schema name of the Azure SQL + Data Warehouse. Type: string (or Expression with resultType string). + :type azure_sql_dw_table_dataset_schema: object + :param table: The table name of the Azure SQL Data Warehouse. Type: string + (or Expression with resultType string). 
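# A hedged sketch of the schema + table split that supersedes the retired
# table_name property; the linked-service reference name is hypothetical.
from azure.mgmt.datafactory.models import (
    AzureSqlDWTableDataset, LinkedServiceReference)

dw_dataset = AzureSqlDWTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='MySqlDWLinkedService'),
    azure_sql_dw_table_dataset_schema='dbo',  # serialized under typeProperties.schema
    table='FactSales',                        # serialized under typeProperties.table
)
# table_name is still accepted for backward compatibility, but it is no longer
# required and is slated for retirement.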
+ :type table: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -65,9 +70,13 @@ class AzureSqlDWTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_dw_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_dw_table_dataset_schema=None, table=None, **kwargs) -> None: super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.azure_sql_dw_table_dataset_schema = azure_sql_dw_table_dataset_schema + self.table = table self.type = 'AzureSqlDWTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py new file mode 100644 index 000000000000..2aab3a145ff2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureSqlMILinkedService(LinkedService): + """Azure SQL Managed Instance linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. 
+ :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Managed Instance. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Managed Instance. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlMILinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureSqlMI' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py new file mode 100644 index 000000000000..ec1a2e5e8549 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureSqlMILinkedService(LinkedService): + """Azure SQL Managed Instance linked service. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Managed Instance. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Managed Instance. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
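# An illustrative construction of the new AzureSqlMILinkedService with service
# principal authentication. The connection string, app id, tenant, and secret
# are placeholders, and SecureString is just one SecretBase implementation.
from azure.mgmt.datafactory.models import AzureSqlMILinkedService, SecureString

mi_linked_service = AzureSqlMILinkedService(
    connection_string='Server=myserver;Database=mydb;',            # required
    service_principal_id='00000000-0000-0000-0000-000000000000',   # hypothetical app id
    service_principal_key=SecureString(value='<placeholder-secret>'),
    tenant='contoso.onmicrosoft.com',
)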
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureSqlMI' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py new file mode 100644 index 000000000000..1128a9e8cb06 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureSqlMITableDataset(Dataset): + """The Azure SQL Managed Instance dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_mi_table_dataset_schema: The schema name of the Azure SQL + Managed Instance. Type: string (or Expression with resultType string). + :type azure_sql_mi_table_dataset_schema: object + :param table: The table name of the Azure SQL Managed Instance dataset. + Type: string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_mi_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlMITableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.azure_sql_mi_table_dataset_schema = kwargs.get('azure_sql_mi_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'AzureSqlMITable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py new file mode 100644 index 000000000000..ac72614e3ed4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureSqlMITableDataset(Dataset): + """The Azure SQL Managed Instance dataset. + + All required parameters must be populated in order to send to Azure. 
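# A sketch of the matching AzureSqlMITableDataset; the reference name below is
# hypothetical, and schema/table follow the same split as the other SQL datasets.
from azure.mgmt.datafactory.models import (
    AzureSqlMITableDataset, LinkedServiceReference)

mi_dataset = AzureSqlMITableDataset(
    linked_service_name=LinkedServiceReference(reference_name='MySqlMILinkedService'),
    azure_sql_mi_table_dataset_schema='dbo',  # typeProperties.schema
    table='Customers',                        # typeProperties.table
)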
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_mi_table_dataset_schema: The schema name of the Azure SQL + Managed Instance. Type: string (or Expression with resultType string). + :type azure_sql_mi_table_dataset_schema: object + :param table: The table name of the Azure SQL Managed Instance dataset. + Type: string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_mi_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_mi_table_dataset_schema=None, table=None, **kwargs) -> None: + super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.azure_sql_mi_table_dataset_schema = azure_sql_mi_table_dataset_schema + self.table = table + self.type = 'AzureSqlMITable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py new file mode 100644 index 
000000000000..5d93df3d790a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
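# A sketch of the new AzureSqlSink combining the stored-procedure write path
# with the 0.8.0 additions; the procedure, table-type, and parameter names are
# hypothetical.
from azure.mgmt.datafactory.models import (
    AzureSqlSink, StoredProcedureParameter)

sql_sink = AzureSqlSink(
    sql_writer_stored_procedure_name='spOverwriteTable',
    sql_writer_table_type='MyTableType',
    stored_procedure_table_type_parameter_name='MyTable',  # new in 0.8.0
    stored_procedure_parameters={
        'identifier': StoredProcedureParameter(value='1', type='Int'),
    },
    table_option='autoCreate',        # only 'autoCreate' is currently supported
    max_concurrent_connections=2,     # new in 0.8.0
)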
+ :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) + self.type = 'AzureSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py new file mode 100644 index 000000000000..e4d5e66e18c5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = 
stored_procedure_table_type_parameter_name + self.table_option = table_option + self.type = 'AzureSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py new file mode 100644 index 000000000000..b6c62f9a3164 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureSqlSource(CopySource): + """A copy activity Azure SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. 
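# A sketch of the new AzureSqlSource; the query text is illustrative. Note that
# sql_reader_query and sql_reader_stored_procedure_name are mutually exclusive,
# so set one or the other, not both.
from azure.mgmt.datafactory.models import AzureSqlSource

sql_source = AzureSqlSource(
    sql_reader_query='SELECT * FROM dbo.FactSales WHERE SaleYear = 2019',
    max_concurrent_connections=2,
)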
+ :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.type = 'AzureSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py new file mode 100644 index 000000000000..cb5c33d28bb2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureSqlSource(CopySource): + """A copy activity Azure SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). 
+ :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'AzureSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py index 9a078241d620..ce8b08944f3a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py @@ -43,15 +43,20 @@ class AzureSqlTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the Azure SQL database. - Type: string (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param azure_sql_table_dataset_schema: The schema name of the Azure SQL + database. Type: string (or Expression with resultType string). + :type azure_sql_table_dataset_schema: object + :param table: The table name of the Azure SQL database. Type: string (or + Expression with resultType string). 
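# A sketch contrasting the retiring and current shapes of AzureSqlTableDataset;
# 'MyAzureSqlLinkedService' is a hypothetical reference name.
from azure.mgmt.datafactory.models import (
    AzureSqlTableDataset, LinkedServiceReference)

ls_ref = LinkedServiceReference(reference_name='MyAzureSqlLinkedService')
old_style = AzureSqlTableDataset(linked_service_name=ls_ref,
                                 table_name='dbo.Orders')    # retiring shape
new_style = AzureSqlTableDataset(linked_service_name=ls_ref,
                                 azure_sql_table_dataset_schema='dbo',
                                 table='Orders')             # preferred shape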
+ :type table: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -65,9 +70,13 @@ class AzureSqlTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__(self, **kwargs): super(AzureSqlTableDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.azure_sql_table_dataset_schema = kwargs.get('azure_sql_table_dataset_schema', None) + self.table = kwargs.get('table', None) self.type = 'AzureSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py index b3c388202f52..3ed19ee47e7e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py @@ -43,15 +43,20 @@ class AzureSqlTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the Azure SQL database. - Type: string (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param azure_sql_table_dataset_schema: The schema name of the Azure SQL + database. Type: string (or Expression with resultType string). + :type azure_sql_table_dataset_schema: object + :param table: The table name of the Azure SQL database. Type: string (or + Expression with resultType string). 
+ :type table: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -65,9 +70,13 @@ class AzureSqlTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_table_dataset_schema=None, table=None, **kwargs) -> None: super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.azure_sql_table_dataset_schema = azure_sql_table_dataset_schema + self.table = table self.type = 'AzureSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py index 711b09a80004..202dd7229b90 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py @@ -29,7 +29,7 @@ class AzureStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py index 428fb82e871a..4fac19b70849 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py index faba497cc734..3459c9ad3ba1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py @@ -34,6 +34,10 @@ class AzureTableSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param azure_table_default_partition_key_value: Azure Table default @@ -60,6 +64,7 @@ class AzureTableSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py index 630df4f1f606..a15247544879 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py @@ -34,6 +34,10 @@ class AzureTableSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str :param azure_table_default_partition_key_value: Azure Table default @@ -60,6 +64,7 @@ class AzureTableSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, @@ -67,8 +72,8 @@ class AzureTableSink(CopySink): 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None: - super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None: + super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.azure_table_default_partition_key_value = azure_table_default_partition_key_value self.azure_table_partition_key_name = azure_table_partition_key_name self.azure_table_row_key_name = azure_table_row_key_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py index f4046c989f4e..fa7ead73eaa9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py @@ -27,6 +27,10 @@ class AzureTableSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param azure_table_source_query: Azure Table source query. 
Type: string @@ -46,6 +50,7 @@ class AzureTableSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py index 30ca05775f27..efbac5613219 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py @@ -27,6 +27,10 @@ class AzureTableSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param azure_table_source_query: Azure Table source query. Type: string @@ -46,13 +50,14 @@ class AzureTableSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None: - super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None: + super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.azure_table_source_query = azure_table_source_query self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found self.type = 'AzureTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py index 152fae6368a6..c2a8c2498ea6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py @@ -29,7 +29,7 @@ class AzureTableStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py index 533ad3509483..8d4e62c4f3e6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureTableStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py new file mode 100644 index 000000000000..5f0f8ef96696 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class BinaryDataset(Dataset): + """Binary dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + :param location: Required. The location of the Binary storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression: The data compression method used for the binary + dataset. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(BinaryDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.compression = kwargs.get('compression', None) + self.type = 'Binary' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py new file mode 100644 index 000000000000..7d26b216fd7a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class BinaryDataset(Dataset): + """Binary dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the Binary storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression: The data compression method used for the binary + dataset. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression=None, **kwargs) -> None: + super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.compression = compression + self.type = 'Binary' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py new file mode 100644 index 000000000000..b991bfee53c7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class BinarySink(CopySink): + """A copy activity Binary sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). 
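[Editor's example] BinaryDataset is new in this release and, per its _validation map, requires both linked_service_name and location. A hedged sketch, assuming DatasetLocation takes a type discriminator plus folder_path as described in its own docstring for this release; 'BlobStore' and the blob path are placeholders:

    from azure.mgmt.datafactory.models import (
        BinaryDataset, DatasetLocation, LinkedServiceReference)

    dataset = BinaryDataset(
        linked_service_name=LinkedServiceReference(reference_name='BlobStore'),
        location=DatasetLocation(type='AzureBlobStorageLocation',
                                 folder_path='container/input'),
    )

compression is optional; omit it for raw pass-through copies.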
+ :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Binary store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + } + + def __init__(self, **kwargs): + super(BinarySink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'BinarySink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py new file mode 100644 index 000000000000..80421d161aed --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class BinarySink(CopySink): + """A copy activity Binary sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + :param store_settings: Binary store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'BinarySink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py new file mode 100644 index 000000000000..48e78e7d24bf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class BinarySource(CopySource): + """A copy activity Binary source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Binary store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, **kwargs): + super(BinarySource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'BinarySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py new file mode 100644 index 000000000000..aa9a9f1412ab --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class BinarySource(CopySource): + """A copy activity Binary source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Binary store settings. 
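[Editor's example] Together, the new BinarySink and BinarySource slot into a copy activity like any other source/sink pair. A sketch under the assumption that CopyActivity and DatasetReference keep their signatures from earlier releases of this SDK; the activity and dataset names are illustrative:

    from azure.mgmt.datafactory.models import (
        BinarySink, BinarySource, CopyActivity, DatasetReference)

    activity = CopyActivity(
        name='CopyBinaryFiles',
        inputs=[DatasetReference(reference_name='SourceBinary')],
        outputs=[DatasetReference(reference_name='SinkBinary')],
        source=BinarySource(max_concurrent_connections=8),
        sink=BinarySink(max_concurrent_connections=4),
    )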
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'BinarySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py index 681cc44d278b..673d34167fed 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py @@ -30,6 +30,9 @@ class BlobEventsTrigger(MultiplePipelineTrigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -64,6 +67,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py index 08d9c542f4af..fb65a420a2cd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py @@ -30,6 +30,9 @@ class BlobEventsTrigger(MultiplePipelineTrigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. 
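[Editor's example] BlobEventsTrigger now forwards the new annotations list to MultiplePipelineTrigger, as the __init__ change in the next hunk shows. A hedged sketch; the scope resource ID is a placeholder:

    from azure.mgmt.datafactory.models import BlobEventsTrigger

    trigger = BlobEventsTrigger(
        events=['Microsoft.Storage.BlobCreated'],
        scope=('/subscriptions/<sub-id>/resourceGroups/<rg>/providers/'
               'Microsoft.Storage/storageAccounts/<account>'),
        blob_path_begins_with='/container/blobs/',
        annotations=['landing-zone'],  # new in 0.8.0 across trigger types
    )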
@@ -64,6 +67,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, @@ -72,8 +76,8 @@ class BlobEventsTrigger(MultiplePipelineTrigger): 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, } - def __init__(self, *, events, scope: str, additional_properties=None, description: str=None, pipelines=None, blob_path_begins_with: str=None, blob_path_ends_with: str=None, **kwargs) -> None: - super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, pipelines=pipelines, **kwargs) + def __init__(self, *, events, scope: str, additional_properties=None, description: str=None, annotations=None, pipelines=None, blob_path_begins_with: str=None, blob_path_ends_with: str=None, **kwargs) -> None: + super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) self.blob_path_begins_with = blob_path_begins_with self.blob_path_ends_with = blob_path_ends_with self.events = events diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py index fe90f5836faf..284e0fcecde5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py @@ -34,6 +34,10 @@ class BlobSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param blob_writer_overwrite_files: Blob writer overwrite files. Type: @@ -45,10 +49,8 @@ class BlobSink(CopySink): :param blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with resultType boolean). :type blob_writer_add_header: object - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object """ _validation = { @@ -61,11 +63,12 @@ class BlobSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py index 1d6ac96aff6e..370acc72e017 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py @@ -34,6 +34,10 @@ class BlobSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param blob_writer_overwrite_files: Blob writer overwrite files. Type: @@ -45,10 +49,8 @@ class BlobSink(CopySink): :param blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with resultType boolean). :type blob_writer_add_header: object - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object """ _validation = { @@ -61,15 +63,16 @@ class BlobSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None: - super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None: + super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.blob_writer_overwrite_files = blob_writer_overwrite_files self.blob_writer_date_time_format = blob_writer_date_time_format self.blob_writer_add_header = blob_writer_add_header diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py index f563d0af1e2d..ab4313a890cb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py @@ -27,6 +27,10 @@ class BlobSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param treat_empty_as_null: Treat empty as null. 
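[Editor's example] Note the behavioral widening in the BlobSink hunks above: copy_behavior is retyped from the CopyBehaviorType enum string to object. Literal values such as 'PreserveHierarchy', 'FlattenHierarchy', and 'MergeFiles' still serialize as before, and, assuming the service's usual expression envelope, a dynamic value can now be supplied where only the enum was accepted:

    from azure.mgmt.datafactory.models import BlobSink

    # A literal string still round-trips unchanged.
    sink = BlobSink(copy_behavior='PreserveHierarchy',
                    max_concurrent_connections=10)

    # Hedged: an ADF expression object in place of the former enum value.
    dynamic_sink = BlobSink(
        copy_behavior={'value': "@pipeline().parameters.copyBehavior",
                       'type': 'Expression'})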
Type: boolean (or @@ -49,6 +53,7 @@ class BlobSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py index 5b9dc775f069..78d90cc61e13 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py @@ -27,6 +27,10 @@ class BlobSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param treat_empty_as_null: Treat empty as null. Type: boolean (or @@ -49,14 +53,15 @@ class BlobSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: - super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: + super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.treat_empty_as_null = treat_empty_as_null self.skip_header_line_count = skip_header_line_count self.recursive = recursive diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py index 6abdece68966..4fd5b5c17100 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py @@ -30,6 +30,9 @@ class BlobTrigger(MultiplePipelineTrigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + 
:param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -59,6 +62,7 @@ class BlobTrigger(MultiplePipelineTrigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py index 2c80ac605368..cccffd881bfb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py @@ -30,6 +30,9 @@ class BlobTrigger(MultiplePipelineTrigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -59,6 +62,7 @@ class BlobTrigger(MultiplePipelineTrigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, @@ -66,8 +70,8 @@ class BlobTrigger(MultiplePipelineTrigger): 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, } - def __init__(self, *, folder_path: str, max_concurrency: int, linked_service, additional_properties=None, description: str=None, pipelines=None, **kwargs) -> None: - super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, pipelines=pipelines, **kwargs) + def __init__(self, *, folder_path: str, max_concurrency: int, linked_service, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) self.folder_path = folder_path self.max_concurrency = max_concurrency self.linked_service = linked_service diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py index 974ce49a1c62..ebba2be42028 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py @@ -29,7 +29,7 @@ class CassandraLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List 
of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py index dbc74f10002f..f22f303cc61d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py @@ -29,7 +29,7 @@ class CassandraLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py index fdd0a228d001..e7ba96c18682 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py @@ -27,6 +27,10 @@ class CassandraSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Should be a SQL-92 query expression or @@ -53,6 +57,7 @@ class CassandraSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py index 323d85d1e742..bd95d158b868 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py @@ -27,6 +27,10 @@ class CassandraSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. 
Should be a SQL-92 query expression or @@ -53,13 +57,14 @@ class CassandraSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, consistency_level=None, **kwargs) -> None: - super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, consistency_level=None, **kwargs) -> None: + super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.consistency_level = consistency_level self.type = 'CassandraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py new file mode 100644 index 000000000000..c7cd4c315852 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class CommonDataServiceForAppsEntityDataset(Dataset): + """The Common Data Service for Apps entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
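[Editor's example] CassandraSource picks up the same max_concurrent_connections parameter as the other sources. A minimal sketch based on the __init__ signature above; the keyspace and table are illustrative:

    from azure.mgmt.datafactory.models import CassandraSource

    source = CassandraSource(
        query='SELECT * FROM telemetry.events',
        consistency_level='LOCAL_QUORUM',  # one of the Cassandra read
                                           # consistency levels
        max_concurrent_connections=2,      # new in 0.8.0
    )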
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). + :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CommonDataServiceForAppsEntityDataset, self).__init__(**kwargs) + self.entity_name = kwargs.get('entity_name', None) + self.type = 'CommonDataServiceForAppsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py new file mode 100644 index 000000000000..050bdb836963 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CommonDataServiceForAppsEntityDataset(Dataset): + """The Common Data Service for Apps entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). + :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: + super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.entity_name = entity_name + self.type = 'CommonDataServiceForAppsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py new file mode 100644 index 000000000000..bbc8b7a0de65 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py @@ -0,0 +1,115 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class CommonDataServiceForAppsLinkedService(LinkedService): + """Common Data Service for Apps linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. 
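[Editor's example] The new CommonDataServiceForAppsEntityDataset requires only a linked service reference; entity_name is the optional logical entity to read. A minimal sketch based on the py3 __init__ above ('CdsLinkedService' and 'account' are illustrative):

    from azure.mgmt.datafactory.models import (
        CommonDataServiceForAppsEntityDataset, LinkedServiceReference)

    dataset = CommonDataServiceForAppsEntityDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='CdsLinkedService'),
        entity_name='account',
    )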
+ :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Common Data + Service for Apps instance. 'Online' for Common Data Service for Apps + Online and 'OnPremisesWithIfd' for Common Data Service for Apps + on-premises with Ifd. Type: string (or Expression with resultType string). + Possible values include: 'Online', 'OnPremisesWithIfd' + :type deployment_type: str or + ~azure.mgmt.datafactory.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Common Data Service for + Apps server. The property is required for on-prem and not allowed for + online. Type: string (or Expression with resultType string). + :type host_name: object + :param port: The port of on-premises Common Data Service for Apps server. + The property is required for on-prem and not allowed for online. Default + is 443. Type: integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Common Data Service for Apps + server. The property is required for online and not allowed for on-prem. + Type: string (or Expression with resultType string). + :type service_uri: object + :param organization_name: The organization name of the Common Data Service + for Apps instance. The property is required for on-prem and required for + online when there is more than one Common Data Service for Apps instance + associated with the user. Type: string (or Expression with resultType + string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect + to Common Data Service for Apps server. 'Office365' for online scenario, + 'Ifd' for on-premises with Ifd scenario. Type: string (or Expression with + resultType string). Possible values include: 'Office365', 'Ifd' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + :param username: Required. User name to access the Common Data Service for + Apps instance. Type: string (or Expression with resultType string). + :type username: object + :param password: Password to access the Common Data Service for Apps + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CommonDataServiceForAppsLinkedService, self).__init__(**kwargs) + self.deployment_type = kwargs.get('deployment_type', None) + self.host_name = kwargs.get('host_name', None) + self.port = kwargs.get('port', None) + self.service_uri = kwargs.get('service_uri', None) + self.organization_name = kwargs.get('organization_name', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'CommonDataServiceForApps' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py new file mode 100644 index 000000000000..1c4897c09868 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py @@ -0,0 +1,115 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CommonDataServiceForAppsLinkedService(LinkedService): + """Common Data Service for Apps linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. 
+ :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Common Data + Service for Apps instance. 'Online' for Common Data Service for Apps + Online and 'OnPremisesWithIfd' for Common Data Service for Apps + on-premises with Ifd. Type: string (or Expression with resultType string). + Possible values include: 'Online', 'OnPremisesWithIfd' + :type deployment_type: str or + ~azure.mgmt.datafactory.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Common Data Service for + Apps server. The property is required for on-prem and not allowed for + online. Type: string (or Expression with resultType string). + :type host_name: object + :param port: The port of on-premises Common Data Service for Apps server. + The property is required for on-prem and not allowed for online. Default + is 443. Type: integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Common Data Service for Apps + server. The property is required for online and not allowed for on-prem. + Type: string (or Expression with resultType string). + :type service_uri: object + :param organization_name: The organization name of the Common Data Service + for Apps instance. The property is required for on-prem and required for + online when there is more than one Common Data Service for Apps instance + associated with the user. Type: string (or Expression with resultType + string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect + to Common Data Service for Apps server. 'Office365' for online scenario, + 'Ifd' for on-premises with Ifd scenario. Type: string (or Expression with + resultType string). Possible values include: 'Office365', 'Ifd' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + :param username: Required. User name to access the Common Data Service for + Apps instance. Type: string (or Expression with resultType string). + :type username: object + :param password: Password to access the Common Data Service for Apps + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'CommonDataServiceForApps' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py new file mode 100644 index 000000000000..0df48841cccc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class CommonDataServiceForAppsSink(CopySink): + """A copy activity Common Data Service for Apps sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. 
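Taken together, the linked service above and its py3 twin expose a keyword-only constructor whose required type properties are deployment_type, authentication_type, and username. A minimal usage sketch for the online scenario (the URI, user name, and secret below are illustrative placeholders, not values from this change set):

    from azure.mgmt.datafactory.models import (
        CommonDataServiceForAppsLinkedService,
        SecureString,
    )

    # Online deployment authenticating with Office365; the password is
    # wrapped in SecureString, a SecretBase subclass from this package.
    cds_linked_service = CommonDataServiceForAppsLinkedService(
        deployment_type='Online',
        authentication_type='Office365',
        username='user@contoso.example',
        service_uri='https://contoso.crm.dynamics.com',
        password=SecureString(value='<placeholder-password>'),
    )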
+ + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert". + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, **kwargs): + super(CommonDataServiceForAppsSink, self).__init__(**kwargs) + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.type = 'CommonDataServiceForAppsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py new file mode 100644 index 000000000000..80f85e6d5809 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class CommonDataServiceForAppsSink(CopySink): + """A copy activity Common Data Service for Apps sink. 
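Before the py3 file repeats the parameter list, note that write_behavior is a required constant ("Upsert"): it is declared as a class attribute, so it is not a constructor argument. A quick illustrative check:

    from azure.mgmt.datafactory.models import CommonDataServiceForAppsSink

    # Only ignore_null_values and the inherited CopySink properties are
    # settable; the write behavior is fixed by the model.
    sink = CommonDataServiceForAppsSink(ignore_null_values=True)
    assert sink.write_behavior == 'Upsert'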
+ + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert". + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.ignore_null_values = ignore_null_values + self.type = 'CommonDataServiceForAppsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py new file mode 100644 index 000000000000..13d2a6b921bb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class CommonDataServiceForAppsSource(CopySource): + """A copy activity Common Data Service for Apps source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Common Data Service for Apps (online & on-premises). Type: + string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CommonDataServiceForAppsSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'CommonDataServiceForAppsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py new file mode 100644 index 000000000000..713db90f9623 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class CommonDataServiceForAppsSource(CopySource): + """A copy activity Common Data Service for Apps source. + + All required parameters must be populated in order to send to Azure. 
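As the docstring notes, this source queries Common Data Service for Apps with FetchXML. A minimal sketch (the FetchXML string is a placeholder; any string, or an ADF expression resolving to one, works):

    from azure.mgmt.datafactory.models import CommonDataServiceForAppsSource

    # Read the first 100 accounts; max_concurrent_connections caps the
    # connection fan-out against the source store.
    source = CommonDataServiceForAppsSource(
        query='<fetch top="100"><entity name="account" /></fetch>',
        max_concurrent_connections=4,
    )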
+ + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Common Data Service for Apps (online & on-premises). Type: + string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'CommonDataServiceForAppsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py index 7b85f1400ff6..04179d0d1f53 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py @@ -29,7 +29,7 @@ class ConcurLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py index 6e17a2c9cc8e..4411db6d2856 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py @@ -29,7 +29,7 @@ class ConcurLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py index f8053415520c..11ae557c0cda 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py @@ -27,6 +27,10 @@ class ConcurSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class ConcurSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py index db9104869417..ac8ae8fb5a91 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py @@ -27,6 +27,10 @@ class ConcurSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class ConcurSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'ConcurSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py index 16581581786b..2242bc36beb2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py @@ -16,9 +16,9 @@ class ControlActivity(Activity): """Base class for all control activities like IfCondition, ForEach , Until. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AppendVariableActivity, SetVariableActivity, - FilterActivity, UntilActivity, WaitActivity, ForEachActivity, - IfConditionActivity, ExecutePipelineActivity + sub-classes are: WebHookActivity, AppendVariableActivity, + SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity, + WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity All required parameters must be populated in order to send to Azure. 
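The hunk below extends _subtype_map, the table msrest consults during polymorphic deserialization: the 'type' discriminator in a payload selects which registered subclass gets instantiated. A self-contained sketch of the lookup idea (illustrative only; this is not the msrest implementation):

    # Maps a 'type' discriminator value to a model class name.
    _subtype_map = {'type': {'WebHook': 'WebHookActivity', 'Wait': 'WaitActivity'}}

    def resolve_activity_class(payload):
        # Fall back to the base class when the discriminator is unknown.
        return _subtype_map['type'].get(payload.get('type'), 'ControlActivity')

    assert resolve_activity_class({'type': 'WebHook'}) == 'WebHookActivity'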
@@ -52,7 +52,7 @@ class ControlActivity(Activity): } _subtype_map = { - 'type': {'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} + 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py index 739d8b9c311b..0aabd99d741f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py @@ -16,9 +16,9 @@ class ControlActivity(Activity): """Base class for all control activities like IfCondition, ForEach , Until. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AppendVariableActivity, SetVariableActivity, - FilterActivity, UntilActivity, WaitActivity, ForEachActivity, - IfConditionActivity, ExecutePipelineActivity + sub-classes are: WebHookActivity, AppendVariableActivity, + SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity, + WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity All required parameters must be populated in order to send to Azure. @@ -52,7 +52,7 @@ class ControlActivity(Activity): } _subtype_map = { - 'type': {'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} + 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} } def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py index 9182efe2469a..2e7c00d551ba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py @@ -41,7 +41,7 @@ class CopyActivity(ExecutionActivity): :type sink: ~azure.mgmt.datafactory.models.CopySink :param translator: Copy activity translator. If not specified, tabular translator is used. - :type translator: ~azure.mgmt.datafactory.models.CopyTranslator + :type translator: object :param enable_staging: Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). 
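With translator loosened from CopyTranslator to object, a column-mapping translator can now be passed as a plain dict that is serialized as-is. A hedged sketch (the property shape follows the service's TabularTranslator JSON, which is not part of this diff):

    # Passed straight through as typeProperties.translator in the payload.
    translator = {
        'type': 'TabularTranslator',
        'columnMappings': 'SourceName: SinkName, SourceId: SinkId',
    }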
@@ -65,6 +65,10 @@ class CopyActivity(ExecutionActivity): settings when EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :param preserve_rules: Preserve Rules. + :type preserve_rules: list[object] + :param preserve: Preserve rules. + :type preserve: list[object] :param inputs: List of inputs for the activity. :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity. @@ -89,13 +93,15 @@ class CopyActivity(ExecutionActivity): 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, - 'translator': {'key': 'typeProperties.translator', 'type': 'CopyTranslator'}, + 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, + 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } @@ -111,6 +117,8 @@ def __init__(self, **kwargs): self.data_integration_units = kwargs.get('data_integration_units', None) self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) + self.preserve_rules = kwargs.get('preserve_rules', None) + self.preserve = kwargs.get('preserve', None) self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) self.type = 'Copy' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py index fd663bd71dc6..f8a1fee5625d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py @@ -41,7 +41,7 @@ class CopyActivity(ExecutionActivity): :type sink: ~azure.mgmt.datafactory.models.CopySink :param translator: Copy activity translator. If not specified, tabular translator is used. - :type translator: ~azure.mgmt.datafactory.models.CopyTranslator + :type translator: object :param enable_staging: Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). @@ -65,6 +65,10 @@ class CopyActivity(ExecutionActivity): settings when EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :param preserve_rules: Preserve Rules. + :type preserve_rules: list[object] + :param preserve: Preserve rules. 
+ :type preserve: list[object] :param inputs: List of inputs for the activity. :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity. @@ -89,18 +93,20 @@ class CopyActivity(ExecutionActivity): 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, - 'translator': {'key': 'typeProperties.translator', 'type': 'CopyTranslator'}, + 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, + 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } - def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, inputs=None, outputs=None, **kwargs) -> None: + def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, preserve=None, inputs=None, outputs=None, **kwargs) -> None: super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.source = source self.sink = sink @@ -111,6 +117,8 @@ def __init__(self, *, name: str, source, sink, additional_properties=None, descr self.data_integration_units = data_integration_units self.enable_skip_incompatible_row = enable_skip_incompatible_row self.redirect_incompatible_row_settings = redirect_incompatible_row_settings + self.preserve_rules = preserve_rules + self.preserve = preserve self.inputs = inputs self.outputs = outputs self.type = 'Copy' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py index 58b55bf39bbc..cbe8f2ecf8f7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py @@ -16,10 +16,15 @@ class CopySink(Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: SalesforceSink, DynamicsSink, OdbcSink, - AzureSearchIndexSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, - SqlSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, - AzureTableSink, AzureQueueSink, SapCloudForCustomerSink + sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, + SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, + DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, + AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, + SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, + DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, + ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, + SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, JsonSink, + DelimitedTextSink All required parameters must be populated in order to send to Azure. @@ -40,6 +45,10 @@ class CopySink(Model): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str """ @@ -54,11 +63,12 @@ class CopySink(Model): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { - 'type': {'SalesforceSink': 'SalesforceSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, **kwargs): @@ -68,4 +78,5 @@ def __init__(self, **kwargs): 
self.write_batch_timeout = kwargs.get('write_batch_timeout', None) self.sink_retry_count = kwargs.get('sink_retry_count', None) self.sink_retry_wait = kwargs.get('sink_retry_wait', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py index 02dfd30c931e..3720bece5674 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py @@ -16,10 +16,15 @@ class CopySink(Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SalesforceSink, DynamicsSink, OdbcSink, - AzureSearchIndexSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, - SqlSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, - AzureTableSink, AzureQueueSink, SapCloudForCustomerSink + sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, + SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, + DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, + AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, + SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, + DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, + ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, + SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, JsonSink, + DelimitedTextSink All required parameters must be populated in order to send to Azure. @@ -40,6 +45,10 @@ class CopySink(Model): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str """ @@ -54,18 +63,20 @@ class CopySink(Model): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { - 'type': {'SalesforceSink': 'SalesforceSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: super(CopySink, self).__init__(**kwargs) self.additional_properties = additional_properties self.write_batch_size = write_batch_size self.write_batch_timeout = write_batch_timeout self.sink_retry_count = sink_retry_count self.sink_retry_wait = sink_retry_wait + self.max_concurrent_connections = max_concurrent_connections self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py index 9a11107fc8e8..b37bca86de5e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py @@ -16,19 +16,27 @@ class CopySource(Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonRedshiftSource, ResponsysSource, + sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, + OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, - MariaDBSource, MagentoSource, JiraSource, ImpalaSource, HubspotSource, - HiveSource, HBaseSource, GreenplumSource, GoogleBigQuerySource, - EloquaSource, DrillSource, CouchbaseSource, ConcurSource, - AzurePostgreSqlSource, AmazonMWSSource, HttpSource, - AzureDataLakeStoreSource, MongoDbSource, CassandraSource, WebSource, - OracleSource, AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, - SqlSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource, - RelationalSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, - AzureTableSource + AzureMariaDBSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, + HubspotSource, HiveSource, HBaseSource, GreenplumSource, + GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, + ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, + AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, + CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, + WebSource, TeradataSource, OracleSource, AzureDataExplorerSource, + AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, SqlMISource, + AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, + SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, + SalesforceServiceCloudSource, SalesforceSource, ODataSource, SapBwSource, + SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, + MicrosoftAccessSource, InformixSource, RelationalSource, + CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, + DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, + JsonSource, DelimitedTextSource, ParquetSource, AvroSource All required parameters must be populated in order to send to Azure. @@ -42,6 +50,10 @@ class CopySource(Model): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str """ @@ -54,11 +66,12 @@ class CopySource(Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 
'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} } def __init__(self, **kwargs): @@ -66,4 +79,5 @@ def __init__(self, **kwargs): self.additional_properties = kwargs.get('additional_properties', None) self.source_retry_count = kwargs.get('source_retry_count', None) self.source_retry_wait = kwargs.get('source_retry_wait', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py index 7c1a96b2897a..22b8c590498e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py @@ -16,19 +16,27 @@ class CopySource(Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonRedshiftSource, ResponsysSource, + sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, + OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, - MariaDBSource, MagentoSource, JiraSource, ImpalaSource, HubspotSource, - HiveSource, HBaseSource, GreenplumSource, GoogleBigQuerySource, - EloquaSource, DrillSource, CouchbaseSource, ConcurSource, - AzurePostgreSqlSource, AmazonMWSSource, HttpSource, - AzureDataLakeStoreSource, MongoDbSource, CassandraSource, WebSource, - OracleSource, AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, - SqlSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource, - RelationalSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, - AzureTableSource + AzureMariaDBSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, + HubspotSource, HiveSource, HBaseSource, GreenplumSource, + GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, + ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, + AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, + CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, + WebSource, TeradataSource, OracleSource, AzureDataExplorerSource, + AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, SqlMISource, + AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, + SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, + SalesforceServiceCloudSource, SalesforceSource, ODataSource, SapBwSource, + SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, + MicrosoftAccessSource, InformixSource, RelationalSource, + CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, + DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, + JsonSource, DelimitedTextSource, ParquetSource, AvroSource All required parameters must be populated in order to send to Azure. @@ -42,6 +50,10 @@ class CopySource(Model): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str """ @@ -54,16 +66,18 @@ class CopySource(Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 
'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: super(CopySource, self).__init__(**kwargs) self.additional_properties = additional_properties self.source_retry_count = source_retry_count self.source_retry_wait = source_retry_wait + self.max_concurrent_connections = max_concurrent_connections self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py index ed9136eee5fe..6a8a462f6f46 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py @@ -29,7 +29,7 @@ class CosmosDbLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
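With max_concurrent_connections hoisted onto the CopySource base class, every source subtype in the map above inherits it, and because the property is typed ``object`` it accepts either a literal or an ADF expression. A minimal sketch (the pipeline parameter name is illustrative, not from this change)::

    from azure.mgmt.datafactory.models import BlobSource

    # A literal integer works, because the property is typed "object"...
    source = BlobSource(max_concurrent_connections=8)

    # ...and so does an ADF expression, passed as the usual expression dict.
    parameterized = BlobSource(
        max_concurrent_connections={
            "value": "@pipeline().parameters.maxConnections",
            "type": "Expression",
        }
    )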
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py index 3b951a68a65a..57dab80e06b9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py @@ -29,7 +29,7 @@ class CosmosDbLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py new file mode 100644 index 000000000000..d86648eb5eee --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class CosmosDbMongoDbApiCollectionDataset(Dataset): + """The CosmosDB (MongoDB API) database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the CosmosDB (MongoDB + API) database. Type: string (or Expression with resultType string). 
+ :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs) + self.collection = kwargs.get('collection', None) + self.type = 'CosmosDbMongoDbApiCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py new file mode 100644 index 000000000000..de2650fa14b4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CosmosDbMongoDbApiCollectionDataset(Dataset): + """The CosmosDB (MongoDB API) database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the CosmosDB (MongoDB + API) database. Type: string (or Expression with resultType string). 
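For illustration, the new collection dataset can be constructed as below; the linked service and collection names are placeholders, not part of this change::

    from azure.mgmt.datafactory.models import (
        CosmosDbMongoDbApiCollectionDataset,
        LinkedServiceReference,
    )

    # linked_service_name and collection are required by _validation; the
    # collection value is serialized under typeProperties.collection.
    dataset = CosmosDbMongoDbApiCollectionDataset(
        linked_service_name=LinkedServiceReference(reference_name="CosmosMongoLS"),
        collection="orders",
    )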
+ :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.collection = collection + self.type = 'CosmosDbMongoDbApiCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py new file mode 100644 index 000000000000..f76e7c5f5a41 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class CosmosDbMongoDbApiLinkedService(LinkedService): + """Linked service for CosmosDB (MongoDB API) data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The CosmosDB (MongoDB API) connection + string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. 
The name of the CosmosDB (MongoDB API) database + that you want to access. Type: string (or Expression with resultType + string). + :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.database = kwargs.get('database', None) + self.type = 'CosmosDbMongoDbApi' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py new file mode 100644 index 000000000000..2a72bfce4ff2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CosmosDbMongoDbApiLinkedService(LinkedService): + """Linked service for CosmosDB (MongoDB API) data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The CosmosDB (MongoDB API) connection + string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the CosmosDB (MongoDB API) database + that you want to access. Type: string (or Expression with resultType + string). 
+ :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.database = database + self.type = 'CosmosDbMongoDbApi' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py new file mode 100644 index 000000000000..0d40b52add80 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class CosmosDbMongoDbApiSink(CopySink): + """A copy activity sink for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
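A matching sketch for the new linked service, using placeholder credentials; per the docstring above, connection_string also accepts an AzureKeyVaultSecretReference::

    from azure.mgmt.datafactory.models import (
        CosmosDbMongoDbApiLinkedService,
        SecureString,
    )

    # connection_string and database are both required by _validation.
    linked_service = CosmosDbMongoDbApiLinkedService(
        connection_string=SecureString(
            value="mongodb://account:key@account.documents.azure.com:10255/?ssl=true"
        ),
        database="salesdb",
    )

Publishing it would then be the usual linked_services.create_or_update call against the factory.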
+ :type type: str + :param write_behavior: Specifies whether the document with same key to be + overwritten (upsert) rather than throw exception (insert). The default + value is "insert". Type: string (or Expression with resultType string). + Type: string (or Expression with resultType string). + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) + self.type = 'CosmosDbMongoDbApiSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py new file mode 100644 index 000000000000..5db1ee5c9d36 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class CosmosDbMongoDbApiSink(CopySink): + """A copy activity sink for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: Specifies whether the document with same key to be + overwritten (upsert) rather than throw exception (insert). The default + value is "insert". 
Type: string (or Expression with resultType string). + Type: string (or Expression with resultType string). + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.type = 'CosmosDbMongoDbApiSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py new file mode 100644 index 000000000000..44dc7443427b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class CosmosDbMongoDbApiSource(CopySource): + """A copy activity source for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param filter: Specifies selection filter using query operators. To return + all documents in a collection, omit this parameter or pass an empty + document ({}). Type: string (or Expression with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. 
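The upsert behavior described above looks like this in practice; values other than write_behavior are illustrative::

    from azure.mgmt.datafactory.models import CosmosDbMongoDbApiSink

    # write_behavior defaults to "insert"; "upsert" overwrites documents
    # sharing a key instead of raising an error.
    sink = CosmosDbMongoDbApiSink(
        write_behavior="upsert",
        write_batch_size=1000,
        max_concurrent_connections=4,
    )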
+ :type cursor_methods: + ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each + batch of the response from MongoDB instance. In most cases, modifying the + batch size will not affect the user or the application. This property's + main purpose is to avoid hit the limitation of response size. Type: + integer (or Expression with resultType integer). + :type batch_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiSource, self).__init__(**kwargs) + self.filter = kwargs.get('filter', None) + self.cursor_methods = kwargs.get('cursor_methods', None) + self.batch_size = kwargs.get('batch_size', None) + self.type = 'CosmosDbMongoDbApiSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py new file mode 100644 index 000000000000..7d180f105abf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class CosmosDbMongoDbApiSource(CopySource): + """A copy activity source for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param filter: Specifies selection filter using query operators. To return + all documents in a collection, omit this parameter or pass an empty + document ({}). Type: string (or Expression with resultType string). 
+ :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. + :type cursor_methods: + ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each + batch of the response from MongoDB instance. In most cases, modifying the + batch size will not affect the user or the application. This property's + main purpose is to avoid hit the limitation of response size. Type: + integer (or Expression with resultType integer). + :type batch_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None: + super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size + self.type = 'CosmosDbMongoDbApiSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py index f5c02a071718..76e45648f941 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py @@ -29,7 +29,7 @@ class CouchbaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py index 1507d6ab7b32..afe336f666de 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py @@ -29,7 +29,7 @@ class CouchbaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
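A sketch of the new source, assuming the usual project/sort/limit properties on MongoDbCursorMethodsProperties (those fields are not shown in this diff)::

    from azure.mgmt.datafactory.models import (
        CosmosDbMongoDbApiSource,
        MongoDbCursorMethodsProperties,
    )

    # Omitting filter (or passing "{}") returns every document in the
    # collection; cursor_methods and batch_size shape the result stream.
    source = CosmosDbMongoDbApiSource(
        filter='{"status": "shipped"}',
        cursor_methods=MongoDbCursorMethodsProperties(
            project='{"_id": 0, "orderId": 1}',
            sort='{"orderId": 1}',
            limit=10000,
        ),
        batch_size=500,
    )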
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py index bfab638594a3..b355605417d1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py @@ -27,6 +27,10 @@ class CouchbaseSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class CouchbaseSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py index cc661253a13d..1358fc20feba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py @@ -27,6 +27,10 @@ class CouchbaseSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class CouchbaseSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'CouchbaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py index f7eceb72ff3b..01cfb7335d37 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py @@ -52,6 +52,9 @@ class CustomActivity(ExecutionActivity): custom activity has the full responsibility to consume and interpret the content defined. :type extended_properties: dict[str, object] + :param retention_time_in_days: The retention time for the files submitted + for custom activity. Type: double (or Expression with resultType double). + :type retention_time_in_days: object """ _validation = { @@ -74,6 +77,7 @@ class CustomActivity(ExecutionActivity): 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, + 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, } def __init__(self, **kwargs): @@ -83,4 +87,5 @@ def __init__(self, **kwargs): self.folder_path = kwargs.get('folder_path', None) self.reference_objects = kwargs.get('reference_objects', None) self.extended_properties = kwargs.get('extended_properties', None) + self.retention_time_in_days = kwargs.get('retention_time_in_days', None) self.type = 'Custom' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py index b82ac57bca4d..bf8326f053f2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py @@ -52,6 +52,9 @@ class CustomActivity(ExecutionActivity): custom activity has the full responsibility to consume and interpret the content defined. :type extended_properties: dict[str, object] + :param retention_time_in_days: The retention time for the files submitted + for custom activity. 
Type: double (or Expression with resultType double). + :type retention_time_in_days: object """ _validation = { @@ -74,13 +77,15 @@ class CustomActivity(ExecutionActivity): 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, + 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, } - def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, resource_linked_service=None, folder_path=None, reference_objects=None, extended_properties=None, **kwargs) -> None: + def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, resource_linked_service=None, folder_path=None, reference_objects=None, extended_properties=None, retention_time_in_days=None, **kwargs) -> None: super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.command = command self.resource_linked_service = resource_linked_service self.folder_path = folder_path self.reference_objects = reference_objects self.extended_properties = extended_properties + self.retention_time_in_days = retention_time_in_days self.type = 'Custom' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py index 4bc3a2863fc3..db14a05e7ad1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py @@ -29,7 +29,7 @@ class CustomDataSourceLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py index 2ec05f7a32d9..f7633ee28cbd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py @@ -29,7 +29,7 @@ class CustomDataSourceLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
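For the retention change above, a minimal CustomActivity sketch; the activity name, command, and linked service reference are placeholders::

    from azure.mgmt.datafactory.models import CustomActivity, LinkedServiceReference

    # retention_time_in_days is typed "object", so a literal double or an
    # ADF expression may be supplied.
    activity = CustomActivity(
        name="RunScorer",
        command="python scorer.py",
        linked_service_name=LinkedServiceReference(reference_name="AzureBatchLS"),
        retention_time_in_days=30,
    )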
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py index 8a6a8ac662a8..a242309c3fd1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py @@ -43,14 +43,13 @@ class CustomDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param type_properties: Required. Custom dataset properties. + :param type_properties: Custom dataset properties. :type type_properties: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'type_properties': {'required': True}, } _attribute_map = { diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py index da681e8360b8..c00dae2b2c56 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py @@ -43,14 +43,13 @@ class CustomDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param type_properties: Required. Custom dataset properties. + :param type_properties: Custom dataset properties. :type type_properties: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'type_properties': {'required': True}, } _attribute_map = { @@ -66,7 +65,7 @@ class CustomDataset(Dataset): 'type_properties': {'key': 'typeProperties', 'type': 'object'}, } - def __init__(self, *, linked_service_name, type_properties, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, type_properties=None, **kwargs) -> None: super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type_properties = type_properties self.type = 'CustomDataset' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py index 2992964b1799..8b3e08f32768 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py @@ -65,6 +65,15 @@ class TriggerRuntimeState(str, Enum): disabled = "Disabled" +class EventSubscriptionStatus(str, Enum): + + enabled = "Enabled" + provisioning = "Provisioning" + deprovisioning = "Deprovisioning" + disabled = "Disabled" + unknown = "Unknown" + + class RunQueryFilterOperand(str, Enum): pipeline_name = "PipelineName" @@ -77,6 +86,8 @@ class 
RunQueryFilterOperand(str, Enum): activity_type = "ActivityType" trigger_name = "TriggerName" trigger_run_timestamp = "TriggerRunTimestamp" + run_group_id = "RunGroupId" + latest_only = "LatestOnly" class RunQueryFilterOperator(str, Enum): @@ -158,6 +169,12 @@ class RecurrenceFrequency(str, Enum): year = "Year" +class GoogleAdWordsAuthenticationType(str, Enum): + + service_authentication = "ServiceAuthentication" + user_authentication = "UserAuthentication" + + class SparkServerType(str, Enum): shark_server = "SharkServer" @@ -267,6 +284,14 @@ class HttpAuthenticationType(str, Enum): client_certificate = "ClientCertificate" +class RestServiceAuthenticationType(str, Enum): + + anonymous = "Anonymous" + basic = "Basic" + aad_service_principal = "AadServicePrincipal" + managed_service_identity = "ManagedServiceIdentity" + + class MongoDbAuthenticationType(str, Enum): basic = "Basic" @@ -277,6 +302,15 @@ class ODataAuthenticationType(str, Enum): basic = "Basic" anonymous = "Anonymous" + windows = "Windows" + aad_service_principal = "AadServicePrincipal" + managed_service_identity = "ManagedServiceIdentity" + + +class ODataAadServicePrincipalCredentialType(str, Enum): + + service_principal_key = "ServicePrincipalKey" + service_principal_cert = "ServicePrincipalCert" class TeradataAuthenticationType(str, Enum): @@ -296,16 +330,25 @@ class SybaseAuthenticationType(str, Enum): windows = "Windows" -class DatasetCompressionLevel(str, Enum): +class DynamicsDeploymentType(str, Enum): - optimal = "Optimal" - fastest = "Fastest" + online = "Online" + on_premises_with_ifd = "OnPremisesWithIfd" -class JsonFormatFilePattern(str, Enum): +class DynamicsAuthenticationType(str, Enum): + + office365 = "Office365" + ifd = "Ifd" - set_of_objects = "setOfObjects" - array_of_objects = "arrayOfObjects" + +class AvroCompressionCodec(str, Enum): + + none = "none" + deflate = "deflate" + snappy = "snappy" + xz = "xz" + bzip2 = "bzip2" class AzureFunctionActivityMethod(str, Enum): @@ -327,6 +370,13 @@ class WebActivityMethod(str, Enum): delete = "DELETE" +class NetezzaPartitionOption(str, Enum): + + none = "None" + data_slice = "DataSlice" + dynamic_range = "DynamicRange" + + class CassandraSourceReadConsistencyLevels(str, Enum): all = "ALL" @@ -341,22 +391,53 @@ class CassandraSourceReadConsistencyLevels(str, Enum): local_serial = "LOCAL_SERIAL" +class TeradataPartitionOption(str, Enum): + + none = "None" + hash = "Hash" + dynamic_range = "DynamicRange" + + +class OraclePartitionOption(str, Enum): + + none = "None" + physical_partitions_of_table = "PhysicalPartitionsOfTable" + dynamic_range = "DynamicRange" + + class StoredProcedureParameterType(str, Enum): string = "String" int_enum = "Int" + int64 = "Int64" decimal_enum = "Decimal" guid = "Guid" boolean = "Boolean" date_enum = "Date" +class SapTablePartitionOption(str, Enum): + + none = "None" + partition_on_int = "PartitionOnInt" + partition_on_calendar_year = "PartitionOnCalendarYear" + partition_on_calendar_month = "PartitionOnCalendarMonth" + partition_on_calendar_date = "PartitionOnCalendarDate" + partition_on_time = "PartitionOnTime" + + class SalesforceSourceReadBehavior(str, Enum): query = "Query" query_all = "QueryAll" +class SsisPackageLocationType(str, Enum): + + ssisdb = "SSISDB" + file = "File" + + class HDInsightActivityDebugInfoOption(str, Enum): none = "None" @@ -376,25 +457,29 @@ class AzureSearchIndexWriteBehaviorType(str, Enum): upload = "Upload" -class CopyBehaviorType(str, Enum): - - preserve_hierarchy = "PreserveHierarchy" - flatten_hierarchy = 
"FlattenHierarchy" - merge_files = "MergeFiles" - - class PolybaseSettingsRejectType(str, Enum): value = "value" percentage = "percentage" +class JsonWriteFilePattern(str, Enum): + + set_of_objects = "setOfObjects" + array_of_objects = "arrayOfObjects" + + class SapCloudForCustomerSinkWriteBehavior(str, Enum): insert = "Insert" update = "Update" +class WebHookActivityMethod(str, Enum): + + post = "POST" + + class IntegrationRuntimeType(str, Enum): managed = "Managed" @@ -434,6 +519,12 @@ class ManagedIntegrationRuntimeNodeStatus(str, Enum): unavailable = "Unavailable" +class IntegrationRuntimeEntityReferenceType(str, Enum): + + integration_runtime_reference = "IntegrationRuntimeReference" + linked_service_reference = "LinkedServiceReference" + + class IntegrationRuntimeSsisCatalogPricingTier(str, Enum): basic = "Basic" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py index de812815bd26..e8e2974b4481 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -17,26 +17,37 @@ class Dataset(Model): data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ResponsysObjectDataset, - SalesforceMarketingCloudObjectDataset, VerticaTableDataset, - NetezzaTableDataset, ZohoObjectDataset, XeroObjectDataset, - SquareObjectDataset, SparkObjectDataset, ShopifyObjectDataset, - ServiceNowObjectDataset, QuickBooksObjectDataset, PrestoObjectDataset, - PhoenixObjectDataset, PaypalObjectDataset, MarketoObjectDataset, - MariaDBTableDataset, MagentoObjectDataset, JiraObjectDataset, - ImpalaObjectDataset, HubspotObjectDataset, HiveObjectDataset, - HBaseObjectDataset, GreenplumTableDataset, GoogleBigQueryObjectDataset, - EloquaObjectDataset, DrillTableDataset, CouchbaseTableDataset, - ConcurObjectDataset, AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, - HttpDataset, AzureSearchIndexDataset, WebTableDataset, - SqlServerTableDataset, SapEccResourceDataset, - SapCloudForCustomerResourceDataset, SalesforceObjectDataset, - RelationalTableDataset, AzureMySqlTableDataset, OracleTableDataset, - ODataResourceDataset, MongoDbCollectionDataset, FileShareDataset, - AzureDataLakeStoreDataset, DynamicsEntityDataset, - DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, - AzureSqlDWTableDataset, AzureSqlTableDataset, AzureTableDataset, - AzureBlobDataset, AmazonS3Dataset + sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, + OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, + ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, + VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, + XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, + ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, + PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, + MarketoObjectDataset, AzureMariaDBTableDataset, MariaDBTableDataset, + MagentoObjectDataset, JiraObjectDataset, ImpalaObjectDataset, + HubspotObjectDataset, HiveObjectDataset, HBaseObjectDataset, + GreenplumTableDataset, GoogleBigQueryObjectDataset, EloquaObjectDataset, + DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, + AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, + AzureSearchIndexDataset, 
WebTableDataset, SapTableResourceDataset, + RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, + SapHanaTableDataset, SapEccResourceDataset, + SapCloudForCustomerResourceDataset, SapBwCubeDataset, SybaseTableDataset, + SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, + MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, + OdbcTableDataset, InformixTableDataset, RelationalTableDataset, + Db2TableDataset, AmazonRedshiftTableDataset, AzureMySqlTableDataset, + TeradataTableDataset, OracleTableDataset, ODataResourceDataset, + CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, + MongoDbCollectionDataset, FileShareDataset, Office365Dataset, + AzureBlobFSDataset, AzureDataLakeStoreDataset, + CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, + CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, + AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, + JsonDataset, DelimitedTextDataset, ParquetDataset, AvroDataset, + AmazonS3Dataset All required parameters must be populated in order to send to Azure. @@ -86,7 +97,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 
'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Json': 'JsonDataset', 'DelimitedText': 
'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py index c16c0611b364..9c97e2bfa5e3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py @@ -22,9 +22,8 @@ class DatasetDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The Deflate compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The Deflate compression level. + :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py index 715fe91a12a3..11d00081bc1c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py @@ -22,9 +22,8 @@ class DatasetDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The Deflate compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The Deflate compression level. + :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py index 48317d06f34e..4925127c7f0f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py @@ -22,9 +22,8 @@ class DatasetGZipCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The GZip compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The GZip compression level. 
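With `level` widened from the `DatasetCompressionLevel` enum to `object`, the client no longer validates the value, so the old string literals keep working and Data Factory expression objects become accepted as well. A minimal sketch against the updated model (the pipeline parameter name is illustrative):

from azure.mgmt.datafactory.models import DatasetGZipCompression

# Plain string, exactly as accepted before the type was widened.
static_gzip = DatasetGZipCompression(level='Optimal')

# An ADF expression object now passes client-side checks too, since the
# parameter is typed as `object` (the parameter name is hypothetical).
dynamic_gzip = DatasetGZipCompression(
    level={'type': 'Expression',
           'value': "@pipeline().parameters.compressionLevel"})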
+ :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py index 99b1081469f8..97346e06366d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py @@ -22,9 +22,8 @@ class DatasetGZipCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The GZip compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The GZip compression level. + :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py new file mode 100644 index 000000000000..2c318a91cccb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetLocation(Model): + """Dataset location. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
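Because `type` on `DatasetLocation` is a required plain string rather than a server-filled constant, callers pick the storage-location discriminator themselves. A minimal sketch, assuming a blob-style location (the discriminator value and paths are illustrative):

from azure.mgmt.datafactory.models import DatasetLocation

location = DatasetLocation(
    type='AzureBlobStorageLocation',  # discriminator chosen by the caller
    folder_path='input/2019/08',      # string, or an ADF expression object
    file_name='events.csv')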
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py new file mode 100644 index 000000000000..d4e32d753197 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetLocation(Model): + """Dataset location. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.folder_path = folder_path + self.file_name = file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py index 9538e6105a8f..82550c2a0df8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py @@ -17,26 +17,37 @@ class Dataset(Model): data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: ResponsysObjectDataset, - SalesforceMarketingCloudObjectDataset, VerticaTableDataset, - NetezzaTableDataset, ZohoObjectDataset, XeroObjectDataset, - SquareObjectDataset, SparkObjectDataset, ShopifyObjectDataset, - ServiceNowObjectDataset, QuickBooksObjectDataset, PrestoObjectDataset, - PhoenixObjectDataset, PaypalObjectDataset, MarketoObjectDataset, - MariaDBTableDataset, MagentoObjectDataset, JiraObjectDataset, - ImpalaObjectDataset, HubspotObjectDataset, HiveObjectDataset, - HBaseObjectDataset, GreenplumTableDataset, GoogleBigQueryObjectDataset, - EloquaObjectDataset, DrillTableDataset, CouchbaseTableDataset, - ConcurObjectDataset, AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, - HttpDataset, AzureSearchIndexDataset, WebTableDataset, - SqlServerTableDataset, SapEccResourceDataset, - SapCloudForCustomerResourceDataset, SalesforceObjectDataset, - RelationalTableDataset, AzureMySqlTableDataset, OracleTableDataset, - ODataResourceDataset, MongoDbCollectionDataset, FileShareDataset, - AzureDataLakeStoreDataset, DynamicsEntityDataset, - DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, - AzureSqlDWTableDataset, AzureSqlTableDataset, AzureTableDataset, - AzureBlobDataset, AmazonS3Dataset + sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, + OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, + ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, + VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, + XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, + ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, + PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, + MarketoObjectDataset, AzureMariaDBTableDataset, MariaDBTableDataset, + MagentoObjectDataset, JiraObjectDataset, ImpalaObjectDataset, + HubspotObjectDataset, HiveObjectDataset, HBaseObjectDataset, + GreenplumTableDataset, GoogleBigQueryObjectDataset, EloquaObjectDataset, + DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, + AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, + AzureSearchIndexDataset, WebTableDataset, SapTableResourceDataset, + RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, + SapHanaTableDataset, SapEccResourceDataset, + SapCloudForCustomerResourceDataset, SapBwCubeDataset, SybaseTableDataset, + SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, + MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, + OdbcTableDataset, InformixTableDataset, RelationalTableDataset, + Db2TableDataset, AmazonRedshiftTableDataset, AzureMySqlTableDataset, + TeradataTableDataset, OracleTableDataset, ODataResourceDataset, + CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, + MongoDbCollectionDataset, FileShareDataset, Office365Dataset, + AzureBlobFSDataset, AzureDataLakeStoreDataset, + CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, + CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, + AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, + JsonDataset, DelimitedTextDataset, ParquetDataset, AvroDataset, + AmazonS3Dataset All required parameters must be populated in order to send to Azure. 
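The `_subtype_map` updated in the next hunk is what drives this dispatch: msrest reads the `type` discriminator from a payload and instantiates the matching subclass. A sketch of that round-trip with one of the newly added formats (payload values are illustrative):

from azure.mgmt.datafactory.models import Dataset, DelimitedTextDataset

payload = {
    'type': 'DelimitedText',  # discriminator key looked up in _subtype_map
    'linkedServiceName': {'referenceName': 'MyBlobStore',
                          'type': 'LinkedServiceReference'},
    'typeProperties': {'location': {'type': 'AzureBlobStorageLocation'}},
}
dataset = Dataset.deserialize(payload)
assert isinstance(dataset, DelimitedTextDataset)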
@@ -86,7 +97,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 
'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py index 9312098be5a3..ed80bf3cbcf2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py @@ -22,9 +22,8 @@ class DatasetZipDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The ZipDeflate compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The ZipDeflate compression level. 
+ :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py index 74fbb92ce1ab..20abd6fe1088 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py @@ -22,9 +22,8 @@ class DatasetZipDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The ZipDeflate compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The ZipDeflate compression level. + :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py index 9349bbcba5e0..d163d2b93c18 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py @@ -29,7 +29,7 @@ class Db2LinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py index d339860c3229..44d784fa9bde 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py @@ -29,7 +29,7 @@ class Db2LinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py new file mode 100644 index 000000000000..a6e8c31ffa1f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class Db2Source(CopySource): + """A copy activity source for Db2 databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Db2Source, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'Db2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py new file mode 100644 index 000000000000..20b169699ae0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class Db2Source(CopySource): + """A copy activity source for Db2 databases. 
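The new source mirrors the shape of the other copy sources: only `query` is Db2-specific, and it picks up the release-wide `max_concurrent_connections` setting like the rest. A minimal construction sketch (query text and values are illustrative):

from azure.mgmt.datafactory.models import Db2Source

db2_source = Db2Source(
    query='SELECT * FROM SALES.ORDERS',  # string, or an ADF expression object
    source_retry_count=2,
    source_retry_wait='00:00:30',        # matches the documented timespan pattern
    max_concurrent_connections=4)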
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'Db2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset.py new file mode 100644 index 000000000000..7092d5fc6cb3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class Db2TableDataset(Dataset): + """The Db2 table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param db2_table_dataset_schema: The Db2 schema name. Type: string (or + Expression with resultType string). + :type db2_table_dataset_schema: object + :param table: The Db2 table name. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'db2_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Db2TableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.db2_table_dataset_schema = kwargs.get('db2_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'Db2Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset_py3.py new file mode 100644 index 000000000000..3fa296454a69 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class Db2TableDataset(Dataset): + """The Db2 table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. 
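Since `table_name` is flagged for retirement in both variants of this model, new code should populate the split `db2_table_dataset_schema` and `table` pair, which serialize to `typeProperties.schema` and `typeProperties.table`. A minimal sketch (reference and table names are illustrative):

from azure.mgmt.datafactory.models import Db2TableDataset, LinkedServiceReference

db2_dataset = Db2TableDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyDb2Service'),
    db2_table_dataset_schema='SALES',  # serialized as typeProperties.schema
    table='ORDERS')                    # preferred over the retiring table_name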
Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param db2_table_dataset_schema: The Db2 schema name. Type: string (or + Expression with resultType string). + :type db2_table_dataset_schema: object + :param table: The Db2 table name. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'db2_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, db2_table_dataset_schema=None, table=None, **kwargs) -> None: + super(Db2TableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.db2_table_dataset_schema = db2_table_dataset_schema + self.table = table + self.type = 'Db2Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py new file mode 100644 index 000000000000..bfee26fcd12c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class DelimitedTextDataset(Dataset):
+    """Delimited text dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset.
+     Type: array (or Expression with resultType array), itemType:
+     DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param location: Required. The location of the delimited text storage.
+    :type location: ~azure.mgmt.datafactory.models.DatasetLocation
+    :param column_delimiter: The column delimiter. Type: string (or
+     Expression with resultType string).
+    :type column_delimiter: object
+    :param row_delimiter: The row delimiter. Type: string (or Expression
+     with resultType string).
+    :type row_delimiter: object
+    :param encoding_name: The code page name of the preferred encoding. If
+     not specified, the default value is UTF-8, unless BOM denotes another
+     Unicode encoding. Refer to the name column of the table in the
+     following link to set supported values:
+     https://msdn.microsoft.com/library/system.text.encoding.aspx. Type:
+     string (or Expression with resultType string).
+    :type encoding_name: object
+    :param compression_codec: The compression codec used for the text
+     files.
+    :type compression_codec: object
+    :param compression_level: The data compression level used for
+     DelimitedText.
+    :type compression_level: object
+    :param quote_char: The quote character. Type: string (or Expression
+     with resultType string).
+    :type quote_char: object
+    :param escape_char: The escape character. Type: string (or Expression
+     with resultType string).
+    :type escape_char: object
+    :param first_row_as_header: When used as input, treat the first row of
+     data as headers. When used as output, write the headers into the
+     output as the first row of data. The default value is false. Type:
+     boolean (or Expression with resultType boolean).
+    :type first_row_as_header: object
+    :param null_value: The null value string. Type: string (or Expression
+     with resultType string).
+    :type null_value: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+        'location': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
+        'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'},
+        'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'},
+        'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'},
+        'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'},
+        'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'},
+        'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'},
+        'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'},
+        'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'},
+        'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(DelimitedTextDataset, self).__init__(**kwargs)
+        self.location = kwargs.get('location', None)
+        self.column_delimiter = kwargs.get('column_delimiter', None)
+        self.row_delimiter = kwargs.get('row_delimiter', None)
+        self.encoding_name = kwargs.get('encoding_name', None)
+        self.compression_codec = kwargs.get('compression_codec', None)
+        self.compression_level = kwargs.get('compression_level', None)
+        self.quote_char = kwargs.get('quote_char', None)
+        self.escape_char = kwargs.get('escape_char', None)
+        self.first_row_as_header = kwargs.get('first_row_as_header', None)
+        self.null_value = kwargs.get('null_value', None)
+        self.type = 'DelimitedText'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py
new file mode 100644
index 000000000000..c2597e6a022b
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py
@@ -0,0 +1,122 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class DelimitedTextDataset(Dataset):
+    """Delimited text dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset.
+     Type: array (or Expression with resultType array), itemType:
+     DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param location: Required. The location of the delimited text storage.
+    :type location: ~azure.mgmt.datafactory.models.DatasetLocation
+    :param column_delimiter: The column delimiter. Type: string (or
+     Expression with resultType string).
+    :type column_delimiter: object
+    :param row_delimiter: The row delimiter. Type: string (or Expression
+     with resultType string).
+    :type row_delimiter: object
+    :param encoding_name: The code page name of the preferred encoding. If
+     not specified, the default value is UTF-8, unless BOM denotes another
+     Unicode encoding. Refer to the name column of the table in the
+     following link to set supported values:
+     https://msdn.microsoft.com/library/system.text.encoding.aspx. Type:
+     string (or Expression with resultType string).
+    :type encoding_name: object
+    :param compression_codec: The compression codec used for the text
+     files.
+    :type compression_codec: object
+    :param compression_level: The data compression level used for
+     DelimitedText.
+    :type compression_level: object
+    :param quote_char: The quote character. Type: string (or Expression
+     with resultType string).
+    :type quote_char: object
+    :param escape_char: The escape character. Type: string (or Expression
+     with resultType string).
+    :type escape_char: object
+    :param first_row_as_header: When used as input, treat the first row of
+     data as headers. When used as output, write the headers into the
+     output as the first row of data. The default value is false. Type:
+     boolean (or Expression with resultType boolean).
+    :type first_row_as_header: object
+    :param null_value: The null value string. Type: string (or Expression
+     with resultType string).
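Putting the pieces together, a delimited-text dataset needs a linked service reference plus the required `location`; everything else is optional. A minimal sketch (store, path, and file names are illustrative):

from azure.mgmt.datafactory.models import (
    DatasetLocation, DelimitedTextDataset, LinkedServiceReference)

csv_dataset = DelimitedTextDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyBlobStore'),
    location=DatasetLocation(type='AzureBlobStorageLocation',
                             folder_path='input', file_name='events.csv'),
    column_delimiter=',',
    first_row_as_header=True,  # first row carries the column headers
    null_value='\\N')          # treat \N in the files as null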
+ :type null_value: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, + 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, + 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, column_delimiter=None, row_delimiter=None, encoding_name=None, compression_codec=None, compression_level=None, quote_char=None, escape_char=None, first_row_as_header=None, null_value=None, **kwargs) -> None: + super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.column_delimiter = column_delimiter + self.row_delimiter = row_delimiter + self.encoding_name = encoding_name + self.compression_codec = compression_codec + self.compression_level = compression_level + self.quote_char = quote_char + self.escape_char = escape_char + self.first_row_as_header = first_row_as_header + self.null_value = null_value + self.type = 'DelimitedText' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py new file mode 100644 index 000000000000..364b103c426a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .format_read_settings import FormatReadSettings + + +class DelimitedTextReadSettings(FormatReadSettings): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip + when reading data from input files. Type: integer (or Expression with + resultType integer). + :type skip_line_count: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextReadSettings, self).__init__(**kwargs) + self.skip_line_count = kwargs.get('skip_line_count', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py new file mode 100644 index 000000000000..62aa0327cfb9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_read_settings_py3 import FormatReadSettings + + +class DelimitedTextReadSettings(FormatReadSettings): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip + when reading data from input files. Type: integer (or Expression with + resultType integer). 
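As with `DatasetLocation`, the `type` of a read-settings object is a caller-supplied string rather than a server-filled constant, so it must be passed explicitly. A short sketch:

from azure.mgmt.datafactory.models import DelimitedTextReadSettings

read_settings = DelimitedTextReadSettings(
    type='DelimitedTextReadSettings',  # required; not filled by the server
    skip_line_count=1)                 # e.g. skip one banner row above the header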
+ :type skip_line_count: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, skip_line_count=None, **kwargs) -> None: + super(DelimitedTextReadSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.skip_line_count = skip_line_count diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py new file mode 100644 index 000000000000..15e0e590b4ee --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'DelimitedTextSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py new file mode 100644 index 000000000000..6481f8021527 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'DelimitedTextSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py new file mode 100644 index 000000000000..10a842ca374a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param format_settings: DelimitedText format settings. 
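A short sketch of the sink above; every parameter is optional, and the ``type`` discriminator is pinned to 'DelimitedTextSink' by the constructor itself (batch size and connection count below are illustrative):

    from azure.mgmt.datafactory.models import DelimitedTextSink

    sink = DelimitedTextSink(
        write_batch_size=10000,        # rows per write batch
        max_concurrent_connections=4,  # new in this release
    )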
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'DelimitedTextSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py new file mode 100644 index 000000000000..e551e32c847e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'DelimitedTextSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py new file mode 100644 index 000000000000..5e0d8db319e5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_settings import FormatWriteSettings + + +class DelimitedTextWriteSettings(FormatWriteSettings): + """Delimited text write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param quote_all_text: Indicates whether string values should always be + enclosed with quotes. Type: boolean (or Expression with resultType + boolean). + :type quote_all_text: object + :param file_extension: Required. The file extension used to create the + files. Type: string (or Expression with resultType string). 
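The source pairs with the read settings shown earlier; a hedged sketch in which the skip count, connection cap, and the ``type`` discriminator string are all illustrative:

    from azure.mgmt.datafactory.models import (
        DelimitedTextReadSettings,
        DelimitedTextSource,
    )

    source = DelimitedTextSource(
        max_concurrent_connections=2,
        format_settings=DelimitedTextReadSettings(
            type='DelimitedTextReadSettings',  # assumed discriminator value
            skip_line_count=1,
        ),
    )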
+ :type file_extension: object + """ + + _validation = { + 'type': {'required': True}, + 'file_extension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, + 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextWriteSettings, self).__init__(**kwargs) + self.quote_all_text = kwargs.get('quote_all_text', None) + self.file_extension = kwargs.get('file_extension', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py new file mode 100644 index 000000000000..2be019ab1e6a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_settings_py3 import FormatWriteSettings + + +class DelimitedTextWriteSettings(FormatWriteSettings): + """Delimited text write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param quote_all_text: Indicates whether string values should always be + enclosed with quotes. Type: boolean (or Expression with resultType + boolean). + :type quote_all_text: object + :param file_extension: Required. The file extension used to create the + files. Type: string (or Expression with resultType string). 
+ :type file_extension: object + """ + + _validation = { + 'type': {'required': True}, + 'file_extension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, + 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + } + + def __init__(self, *, type: str, file_extension, additional_properties=None, quote_all_text=None, **kwargs) -> None: + super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.quote_all_text = quote_all_text + self.file_extension = file_extension diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py index 43253aff51d0..c2908dc1dd05 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py @@ -34,11 +34,18 @@ class DocumentDbCollectionSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). :type nesting_separator: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. + Allowed values: insert and upsert. + :type write_behavior: object """ _validation = { @@ -51,11 +58,14 @@ class DocumentDbCollectionSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__(self, **kwargs): super(DocumentDbCollectionSink, self).__init__(**kwargs) self.nesting_separator = kwargs.get('nesting_separator', None) + self.write_behavior = kwargs.get('write_behavior', None) self.type = 'DocumentDbCollectionSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py index 5377d4ed5aa5..f1410cd211a4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py @@ -34,11 +34,18 @@ class DocumentDbCollectionSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). 
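Unlike the read settings, both ``type`` and ``file_extension`` are required here per the validation map; a minimal sketch with assumed values:

    from azure.mgmt.datafactory.models import DelimitedTextWriteSettings

    write_settings = DelimitedTextWriteSettings(
        type='DelimitedTextWriteSettings',  # assumed discriminator value
        file_extension='.csv',
        quote_all_text=True,  # quote every string value in the output files
    )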
+ :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). :type nesting_separator: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. + Allowed values: insert and upsert. + :type write_behavior: object """ _validation = { @@ -51,11 +58,14 @@ class DocumentDbCollectionSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, nesting_separator=None, **kwargs) -> None: - super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, write_behavior=None, **kwargs) -> None: + super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.nesting_separator = nesting_separator + self.write_behavior = write_behavior self.type = 'DocumentDbCollectionSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py index ac6bd77955c8..9fdd23f2795f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py @@ -27,6 +27,10 @@ class DocumentDbCollectionSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Documents query. 
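The two Cosmos DB sink additions combine as below; 'upsert' is one of the two allowed write behaviors named in the docstring, and the connection count is illustrative:

    from azure.mgmt.datafactory.models import DocumentDbCollectionSink

    sink = DocumentDbCollectionSink(
        write_behavior='upsert',       # or 'insert'
        max_concurrent_connections=4,
    )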
Type: string (or Expression with resultType @@ -45,6 +49,7 @@ class DocumentDbCollectionSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py index 9c20bfbfa132..9e0bf6382b04 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py @@ -27,6 +27,10 @@ class DocumentDbCollectionSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Documents query. Type: string (or Expression with resultType @@ -45,13 +49,14 @@ class DocumentDbCollectionSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, nesting_separator=None, **kwargs) -> None: - super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None, **kwargs) -> None: + super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.nesting_separator = nesting_separator self.type = 'DocumentDbCollectionSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py index 52ad5888f5f2..c5428ace02a2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py @@ -29,7 +29,7 @@ class DrillLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags 
that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py index b556d7e92be3..5fb0cb25ecdb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py @@ -29,7 +29,7 @@ class DrillLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py index c2e390308b81..9a3391f27786 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py @@ -27,6 +27,10 @@ class DrillSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class DrillSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py index ea67bbef64fb..313183abab83 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py @@ -27,6 +27,10 @@ class DrillSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
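A sketch of the Drill source with the new connection cap; the query text is a placeholder:

    from azure.mgmt.datafactory.models import DrillSource

    source = DrillSource(
        query='SELECT * FROM my_table',  # hypothetical query
        max_concurrent_connections=2,
    )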
Type: string (or @@ -42,11 +46,12 @@ class DrillSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'DrillSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py index c12b086b7824..3dfd5715deb9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py @@ -43,9 +43,15 @@ class DrillTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Drill. Type: string (or Expression + with resultType string). + :type table: object + :param drill_table_dataset_schema: The schema name of the Drill. Type: + string (or Expression with resultType string). + :type drill_table_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class DrillTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'drill_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(DrillTableDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.drill_table_dataset_schema = kwargs.get('drill_table_dataset_schema', None) self.type = 'DrillTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py index f4f5712f29e3..db46bdc4e0bd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py @@ -43,9 +43,15 @@ class DrillTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. 
Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Drill. Type: string (or Expression + with resultType string). + :type table: object + :param drill_table_dataset_schema: The schema name of the Drill. Type: + string (or Expression with resultType string). + :type drill_table_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class DrillTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'drill_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, drill_table_dataset_schema=None, **kwargs) -> None: super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.drill_table_dataset_schema = drill_table_dataset_schema self.type = 'DrillTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py new file mode 100644 index 000000000000..5ff0b150718b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class DynamicsAXLinkedService(LinkedService): + """Dynamics AX linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. 
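Since ``table_name`` is being retired, a Drill dataset would now be declared with the split schema/table properties; a sketch in which the linked-service reference name, schema, and table are placeholders:

    from azure.mgmt.datafactory.models import (
        DrillTableDataset,
        LinkedServiceReference,
    )

    dataset = DrillTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='MyDrillService'),
        drill_table_dataset_schema='dfs',  # serialized as typeProperties.schema
        table='my_table',                  # serialized as typeProperties.table
    )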
Constant filled by server. + :type type: str + :param url: Required. The Dynamics AX (or Dynamics 365 Finance and + Operations) instance OData endpoint. + :type url: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + Mark this field as a SecureString to store it securely in Data Factory, or + reference a secret stored in Azure Key Vault. Type: string (or Expression + with resultType string). + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: Required. Specify the tenant information (domain name or + tenant ID) under which your application resides. Retrieve it by hovering + the mouse in the top-right corner of the Azure portal. Type: string (or + Expression with resultType string). + :type tenant: object + :param aad_resource_id: Required. Specify the resource you are requesting + authorization. Type: string (or Expression with resultType string). + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'tenant': {'required': True}, + 'aad_resource_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsAXLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'DynamicsAX' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py new file mode 100644 index 000000000000..79d3a34ba313 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class DynamicsAXLinkedService(LinkedService): + """Dynamics AX linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The Dynamics AX (or Dynamics 365 Finance and + Operations) instance OData endpoint. + :type url: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + Mark this field as a SecureString to store it securely in Data Factory, or + reference a secret stored in Azure Key Vault. Type: string (or Expression + with resultType string). + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: Required. Specify the tenant information (domain name or + tenant ID) under which your application resides. Retrieve it by hovering + the mouse in the top-right corner of the Azure portal. Type: string (or + Expression with resultType string). + :type tenant: object + :param aad_resource_id: Required. Specify the resource you are requesting + authorization. Type: string (or Expression with resultType string). + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
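All five service-principal fields above are required by the validation map; a sketch with placeholder endpoint and credential values, assuming ``SecureString`` for the key as the docstring suggests:

    from azure.mgmt.datafactory.models import (
        DynamicsAXLinkedService,
        SecureString,
    )

    linked_service = DynamicsAXLinkedService(
        url='https://contoso.operations.dynamics.com/data',
        service_principal_id='<application-client-id>',
        service_principal_key=SecureString(value='<application-key>'),
        tenant='contoso.onmicrosoft.com',
        aad_resource_id='https://contoso.operations.dynamics.com',
    )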
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'tenant': {'required': True}, + 'aad_resource_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, service_principal_id, service_principal_key, tenant, aad_resource_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: + super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id + self.encrypted_credential = encrypted_credential + self.type = 'DynamicsAX' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py new file mode 100644 index 000000000000..392b8ac7b971 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DynamicsAXResourceDataset(Dataset): + """The path of the Dynamics AX OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the Dynamics AX OData entity. Type: + string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsAXResourceDataset, self).__init__(**kwargs) + self.path = kwargs.get('path', None) + self.type = 'DynamicsAXResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py new file mode 100644 index 000000000000..6cade3e4aa59 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DynamicsAXResourceDataset(Dataset): + """The path of the Dynamics AX OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
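For the Dynamics AX dataset, only the linked service reference and the entity path are required; both values below are placeholders:

    from azure.mgmt.datafactory.models import (
        DynamicsAXResourceDataset,
        LinkedServiceReference,
    )

    dataset = DynamicsAXResourceDataset(
        linked_service_name=LinkedServiceReference(reference_name='MyDynamicsAXService'),
        path='Customers',  # hypothetical OData entity path
    )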
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the Dynamics AX OData entity. Type: + string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.path = path + self.type = 'DynamicsAXResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py new file mode 100644 index 000000000000..619bad0f75c9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DynamicsAXSource(CopySource): + """A copy activity Dynamics AX source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsAXSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'DynamicsAXSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py new file mode 100644 index 000000000000..7679e68bae7b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DynamicsAXSource(CopySource): + """A copy activity Dynamics AX source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
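Every parameter of the Dynamics AX source is optional; a sketch with an illustrative OData query option:

    from azure.mgmt.datafactory.models import DynamicsAXSource

    source = DynamicsAXSource(
        query='$top=100',  # hypothetical OData query option
        max_concurrent_connections=2,
    )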
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'DynamicsAXSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py new file mode 100644 index 000000000000..ff4079761cf0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DynamicsCrmEntityDataset(Dataset): + """The Dynamics CRM entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). 
+ :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsCrmEntityDataset, self).__init__(**kwargs) + self.entity_name = kwargs.get('entity_name', None) + self.type = 'DynamicsCrmEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py new file mode 100644 index 000000000000..4a1ef86b2dc6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DynamicsCrmEntityDataset(Dataset): + """The Dynamics CRM entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). 
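As a sketch of how this dataset is typically constructed (the linked service name below is hypothetical):

    from azure.mgmt.datafactory.models import (
        DynamicsCrmEntityDataset,
        LinkedServiceReference,
    )

    dataset = DynamicsCrmEntityDataset(
        linked_service_name=LinkedServiceReference(
            reference_name="MyDynamicsCrmLinkedService"),  # hypothetical name
        entity_name="account",  # logical entity name; string or ADF expression
    )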
+ :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: + super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.entity_name = entity_name + self.type = 'DynamicsCrmEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py new file mode 100644 index 000000000000..aad71042bb04 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class DynamicsCrmLinkedService(LinkedService): + """Dynamics CRM linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Dynamics CRM + instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for + Dynamics CRM on-premises with Ifd. Type: string (or Expression with + resultType string). 
Possible values include: 'Online', 'OnPremisesWithIfd' + :type deployment_type: str or + ~azure.mgmt.datafactory.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Dynamics CRM server. + The property is required for on-prem and not allowed for online. Type: + string (or Expression with resultType string). + :type host_name: object + :param port: The port of the on-premises Dynamics CRM server. The property is + required for on-prem and not allowed for online. Default is 443. Type: + integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Dynamics CRM server. The + property is required for online and not allowed for on-prem. Type: string + (or Expression with resultType string). + :type service_uri: object + :param organization_name: The organization name of the Dynamics CRM + instance. The property is required for on-prem and required for online + when there is more than one Dynamics CRM instance associated with the + user. Type: string (or Expression with resultType string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect + to the Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for + on-premises with Ifd scenario. Type: string (or Expression with resultType + string). Possible values include: 'Office365', 'Ifd' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + :param username: Required. User name to access the Dynamics CRM instance. + Type: string (or Expression with resultType string). + :type username: object + :param password: Password to access the Dynamics CRM instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
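A minimal online configuration of this linked service might look like the following sketch (URL and credentials are placeholders; any SecretBase such as SecureString should work for the password):

    from azure.mgmt.datafactory.models import (
        DynamicsCrmLinkedService,
        SecureString,
    )

    linked_service = DynamicsCrmLinkedService(
        deployment_type="Online",         # so host_name/port are omitted
        authentication_type="Office365",  # the online authentication scenario
        username="user@contoso.com",              # placeholder
        password=SecureString(value="<secret>"),  # placeholder
        service_uri="https://contoso.crm.dynamics.com",  # placeholder
    )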
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsCrmLinkedService, self).__init__(**kwargs) + self.deployment_type = kwargs.get('deployment_type', None) + self.host_name = kwargs.get('host_name', None) + self.port = kwargs.get('port', None) + self.service_uri = kwargs.get('service_uri', None) + self.organization_name = kwargs.get('organization_name', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'DynamicsCrm' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py new file mode 100644 index 000000000000..2286301fabef --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class DynamicsCrmLinkedService(LinkedService): + """Dynamics CRM linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Dynamics CRM + instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for + Dynamics CRM on-premises with Ifd. Type: string (or Expression with + resultType string). Possible values include: 'Online', 'OnPremisesWithIfd' + :type deployment_type: str or + ~azure.mgmt.datafactory.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Dynamics CRM server. + The property is required for on-prem and not allowed for online. Type: + string (or Expression with resultType string). + :type host_name: object + :param port: The port of the on-premises Dynamics CRM server. The property is + required for on-prem and not allowed for online. Default is 443. Type: + integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Dynamics CRM server. The + property is required for online and not allowed for on-prem. Type: string + (or Expression with resultType string). + :type service_uri: object + :param organization_name: The organization name of the Dynamics CRM + instance. The property is required for on-prem and required for online + when there is more than one Dynamics CRM instance associated with the + user. Type: string (or Expression with resultType string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect + to the Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for + on-premises with Ifd scenario. Type: string (or Expression with resultType + string). Possible values include: 'Office365', 'Ifd' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + :param username: Required. User name to access the Dynamics CRM instance. + Type: string (or Expression with resultType string). + :type username: object + :param password: Password to access the Dynamics CRM instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'DynamicsCrm' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py new file mode 100644 index 000000000000..2d0f462e0f59 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class DynamicsCrmSink(CopySink): + """A copy activity Dynamics CRM sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. 
Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, **kwargs): + super(DynamicsCrmSink, self).__init__(**kwargs) + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.type = 'DynamicsCrmSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py new file mode 100644 index 000000000000..d9f4fcf092c8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class DynamicsCrmSink(CopySink): + """A copy activity Dynamics CRM sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. 
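Concretely, a sink carrying the new connection cap could be built like this sketch (batch size and cap are placeholders):

    from azure.mgmt.datafactory.models import DynamicsCrmSink

    sink = DynamicsCrmSink(
        write_batch_size=500,          # placeholder
        ignore_null_values=True,       # skip nulls (except key fields) on write
        max_concurrent_connections=2,  # new in 0.8.0
    )
    # write_behavior is a class-level constant; the service only accepts it:
    assert sink.write_behavior == "Upsert"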
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.ignore_null_values = ignore_null_values + self.type = 'DynamicsCrmSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py new file mode 100644 index 000000000000..641fad43f437 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DynamicsCrmSource(CopySource): + """A copy activity Dynamics CRM source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression + with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsCrmSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'DynamicsCrmSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py new file mode 100644 index 000000000000..29c3e78609a5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DynamicsCrmSource(CopySource): + """A copy activity Dynamics CRM source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression + with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'DynamicsCrmSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py index 50ec75f79523..c925033d1240 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py @@ -29,7 +29,7 @@ class DynamicsLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py index 4971dabfba16..07c028ff2477 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py @@ -29,7 +29,7 @@ class DynamicsLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
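Putting the new Dynamics CRM source and sink together in a copy activity might look like this sketch (dataset names are hypothetical and pipeline creation is omitted):

    from azure.mgmt.datafactory.models import (
        CopyActivity,
        DatasetReference,
        DynamicsCrmSink,
        DynamicsCrmSource,
    )

    fetch_xml = "<fetch><entity name='account'><all-attributes/></entity></fetch>"
    copy = CopyActivity(
        name="CopyCrmAccounts",
        inputs=[DatasetReference(reference_name="CrmAccounts")],   # hypothetical
        outputs=[DatasetReference(reference_name="SqlAccounts")],  # hypothetical
        source=DynamicsCrmSource(query=fetch_xml),  # FetchXML, per the docstring
        sink=DynamicsCrmSink(ignore_null_values=True),
    )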
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py index 7eb8be963583..45bac7b52064 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py @@ -37,6 +37,10 @@ class DynamicsSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. @@ -59,6 +63,7 @@ class DynamicsSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py index 2e2a64169797..5f736f9cf658 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py @@ -37,6 +37,10 @@ class DynamicsSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. 
@@ -59,6 +63,7 @@ class DynamicsSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -66,7 +71,7 @@ class DynamicsSink(CopySink): write_behavior = "Upsert" - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, ignore_null_values=None, **kwargs) -> None: - super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.ignore_null_values = ignore_null_values self.type = 'DynamicsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py index 09c04a8d09a6..d38f96fee911 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py @@ -27,6 +27,10 @@ class DynamicsSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: FetchXML is a proprietary query language that is used in @@ -43,6 +47,7 @@ class DynamicsSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py index 9c921cf40f3a..12d83625bc6a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py @@ -27,6 +27,10 @@ class DynamicsSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: FetchXML is a proprietary query language that is used in @@ -43,11 +47,12 @@ class DynamicsSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'DynamicsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py index ae14064ae523..6249c2e2334b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py @@ -29,7 +29,7 @@ class EloquaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py index 1c6bd97ecf9d..623d798036a3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py @@ -29,7 +29,7 @@ class EloquaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
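The widened DynamicsSource signature above means existing call sites simply gain one optional keyword, e.g. (values are placeholders):

    from azure.mgmt.datafactory.models import DynamicsSource

    source = DynamicsSource(
        query="<fetch mapping='logical'/>",  # placeholder FetchXML
        max_concurrent_connections=8,        # previously rejected, now accepted
    )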
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py index 694282ebcd8a..f016140189f1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py @@ -27,6 +27,10 @@ class EloquaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class EloquaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py index c9d96711743f..d200ff32fd9d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py @@ -27,6 +27,10 @@ class EloquaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class EloquaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'EloquaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py new file mode 100644 index 000000000000..5db1448a5a55 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class EntityReference(Model): + """The entity reference. + + :param type: The type of this referenced entity. Possible values include: + 'IntegrationRuntimeReference', 'LinkedServiceReference' + :type type: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. + :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(EntityReference, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.reference_name = kwargs.get('reference_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py new file mode 100644 index 000000000000..f87698b67a64 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
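EntityReference usage is straightforward; for example, pointing at an integration runtime by name (the name is hypothetical; either the string or the enum value is accepted for type):

    from azure.mgmt.datafactory.models import EntityReference

    ref = EntityReference(
        type="IntegrationRuntimeReference",  # discriminates the referenced entity
        reference_name="MySelfHostedIR",     # hypothetical
    )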
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class EntityReference(Model): + """The entity reference. + + :param type: The type of this referenced entity. Possible values include: + 'IntegrationRuntimeReference', 'LinkedServiceReference' + :type type: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. + :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__(self, *, type=None, reference_name: str=None, **kwargs) -> None: + super(EntityReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py index 3ea2abd2e734..9efa853dac86 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py @@ -73,6 +73,8 @@ class ExecuteSSISPackageActivity(ExecutionActivity): package. :type property_overrides: dict[str, ~azure.mgmt.datafactory.models.SSISPropertyOverride] + :param log_location: SSIS package execution log location. + :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation """ _validation = { @@ -102,6 +104,7 @@ class ExecuteSSISPackageActivity(ExecutionActivity): 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, + 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, } def __init__(self, **kwargs): @@ -117,4 +120,5 @@ def __init__(self, **kwargs): self.project_connection_managers = kwargs.get('project_connection_managers', None) self.package_connection_managers = kwargs.get('package_connection_managers', None) self.property_overrides = kwargs.get('property_overrides', None) + self.log_location = kwargs.get('log_location', None) self.type = 'ExecuteSSISPackage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py index fb72bacf03d9..64efa9cd63ac 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py @@ -73,6 +73,8 @@ class ExecuteSSISPackageActivity(ExecutionActivity): package. :type property_overrides: dict[str, ~azure.mgmt.datafactory.models.SSISPropertyOverride] + :param log_location: SSIS package execution log location. 
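A sketch of attaching the new log location (the SSISLogLocation and SSISPackageLocation signatures are assumed from their model files elsewhere in this change, and all names and paths are placeholders):

    from azure.mgmt.datafactory.models import (
        ExecuteSSISPackageActivity,
        IntegrationRuntimeReference,
        SSISLogLocation,
        SSISPackageLocation,
    )

    activity = ExecuteSSISPackageActivity(
        name="RunPackage",
        package_location=SSISPackageLocation(
            package_path="/SSISDB/Folder/Project/Package.dtsx"),  # assumed signature
        connect_via=IntegrationRuntimeReference(
            reference_name="MyAzureSsisIR"),                      # hypothetical
        log_location=SSISLogLocation(
            log_path="\\\\fileserver\\ssislogs", type="File"),    # assumed signature
    )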
+ :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation """ _validation = { @@ -102,9 +104,10 @@ class ExecuteSSISPackageActivity(ExecutionActivity): 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, + 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, } - def __init__(self, *, name: str, package_location, connect_via, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, runtime=None, logging_level=None, environment_path=None, execution_credential=None, project_parameters=None, package_parameters=None, project_connection_managers=None, package_connection_managers=None, property_overrides=None, **kwargs) -> None: + def __init__(self, *, name: str, package_location, connect_via, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, runtime=None, logging_level=None, environment_path=None, execution_credential=None, project_parameters=None, package_parameters=None, project_connection_managers=None, package_connection_managers=None, property_overrides=None, log_location=None, **kwargs) -> None: super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.package_location = package_location self.runtime = runtime @@ -117,4 +120,5 @@ def __init__(self, *, name: str, package_location, connect_via, additional_prope self.project_connection_managers = project_connection_managers self.package_connection_managers = package_connection_managers self.property_overrides = property_overrides + self.log_location = log_location self.type = 'ExecuteSSISPackage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py index aca89a009b8e..8c16eff2c753 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py @@ -20,8 +20,9 @@ class ExecutionActivity(Activity): DatabricksSparkJarActivity, DatabricksNotebookActivity, DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, - LookupActivity, DeleteActivity, SqlServerStoredProcedureActivity, - CustomActivity, ExecuteSSISPackageActivity, HDInsightSparkActivity, + LookupActivity, AzureDataExplorerCommandActivity, DeleteActivity, + SqlServerStoredProcedureActivity, CustomActivity, + ExecuteSSISPackageActivity, HDInsightSparkActivity, HDInsightStreamingActivity, HDInsightMapReduceActivity, HDInsightPigActivity, HDInsightHiveActivity, CopyActivity @@ -64,7 +65,7 @@ class ExecutionActivity(Activity): } _subtype_map = { - 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 
'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} + 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py index 7f3b452fc3f9..5deb58db81a7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py @@ -20,8 +20,9 @@ class ExecutionActivity(Activity): DatabricksSparkJarActivity, DatabricksNotebookActivity, DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, - LookupActivity, DeleteActivity, SqlServerStoredProcedureActivity, - CustomActivity, ExecuteSSISPackageActivity, HDInsightSparkActivity, + LookupActivity, AzureDataExplorerCommandActivity, DeleteActivity, + SqlServerStoredProcedureActivity, CustomActivity, + ExecuteSSISPackageActivity, HDInsightSparkActivity, HDInsightStreamingActivity, HDInsightMapReduceActivity, HDInsightPigActivity, HDInsightHiveActivity, CopyActivity @@ -64,7 +65,7 @@ class ExecutionActivity(Activity): } _subtype_map = { - 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 
'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} + 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} } def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py index c3c90f30935d..ffced5c2e689 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py @@ -29,7 +29,7 @@ class FileServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py index a9793d5b44fc..ec6fe58bb3a3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py @@ -29,7 +29,7 @@ class FileServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
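The _subtype_map entries above are what let msrest resolve the wire-level 'type' discriminator to the right subclass; roughly, as a sketch using msrest's generic Model.deserialize:

    from azure.mgmt.datafactory.models import ExecutionActivity

    payload = {
        "name": "RunKqlCommand",
        "type": "AzureDataExplorerCommand",  # discriminator registered above
        "typeProperties": {"command": ".show tables"},
    }
    activity = ExecutionActivity.deserialize(payload)
    # activity is an AzureDataExplorerCommandActivity instance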
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py new file mode 100644 index 000000000000..edce5fe68a65 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class FileServerLocation(DatasetLocation): + """The location of file server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileServerLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py new file mode 100644 index 000000000000..f7fb8354bcbc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class FileServerLocation(DatasetLocation): + """The location of file server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. 
Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(FileServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py new file mode 100644 index 000000000000..da9d0809e03a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings import StoreReadSettings + + +class FileServerReadSettings(StoreReadSettings): + """File server read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: FileServer wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
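# Editor's note: FileServerLocation only adds a discriminator over the generic
# DatasetLocation (folderPath/fileName). A minimal construction sketch, assuming
# the caller supplies the 'type' string, since this version declares it as a
# required plain parameter rather than a server-filled constant; the path and
# file name are illustrative:
from azure.mgmt.datafactory.models import FileServerLocation

location = FileServerLocation(
    type='FileServerLocation',     # required by _validation in this model
    folder_path='share/incoming',
    file_name='orders.csv',
)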
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileServerReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py new file mode 100644 index 000000000000..1fadb49b1795 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class FileServerReadSettings(StoreReadSettings): + """File server read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: FileServer wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). 
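# Editor's note: a hedged sketch of FileServerReadSettings combining the wildcard
# and modified-datetime filters documented above. Values are illustrative; the
# datetime window is passed as ISO-8601 strings because both parameters are
# typed as string expressions:
from azure.mgmt.datafactory.models import FileServerReadSettings

read_settings = FileServerReadSettings(
    type='FileServerReadSettings',
    recursive=True,
    wildcard_file_name='*.csv',
    modified_datetime_start='2019-08-01T00:00:00Z',
    modified_datetime_end='2019-08-31T00:00:00Z',
    max_concurrent_connections=4,
)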
+ :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py new file mode 100644 index 000000000000..e3bc7946d1ac --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings import StoreWriteSettings + + +class FileServerWriteSettings(StoreWriteSettings): + """File server write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(FileServerWriteSettings, self).__init__(**kwargs) + self.type = 'FileServerWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py new file mode 100644 index 000000000000..b174cf537577 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings_py3 import StoreWriteSettings + + +class FileServerWriteSettings(StoreWriteSettings): + """File server write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'FileServerWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py index a851956ea319..6874f4c08929 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py @@ -49,6 +49,12 @@ class FileShareDataset(Dataset): :param file_name: The name of the on-premises file system. Type: string (or Expression with resultType string). :type file_name: object + :param modified_datetime_start: The start of file's modified datetime. 
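# Editor's note: unlike the read settings above, FileServerWriteSettings fills
# its own 'type' discriminator in __init__, so only the optional sink knobs are
# passed. 'PreserveHierarchy' is one of the documented copy-behavior values for
# file-based sinks, shown here as a plain string since the property is typed as
# object; both values are illustrative:
from azure.mgmt.datafactory.models import FileServerWriteSettings

write_settings = FileServerWriteSettings(
    copy_behavior='PreserveHierarchy',
    max_concurrent_connections=2,
)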
+ Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object :param format: The format of the files. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param file_filter: Specify a filter to be used to select a subset of @@ -76,6 +82,8 @@ class FileShareDataset(Dataset): 'type': {'key': 'type', 'type': 'str'}, 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, @@ -85,6 +93,8 @@ def __init__(self, **kwargs): super(FileShareDataset, self).__init__(**kwargs) self.folder_path = kwargs.get('folder_path', None) self.file_name = kwargs.get('file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.format = kwargs.get('format', None) self.file_filter = kwargs.get('file_filter', None) self.compression = kwargs.get('compression', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py index 675583ae2f2c..19e88a264e12 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py @@ -49,6 +49,12 @@ class FileShareDataset(Dataset): :param file_name: The name of the on-premises file system. Type: string (or Expression with resultType string). :type file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object :param format: The format of the files. 
:type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param file_filter: Specify a filter to be used to select a subset of @@ -76,15 +82,19 @@ class FileShareDataset(Dataset): 'type': {'key': 'type', 'type': 'str'}, 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, file_filter=None, compression=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, file_filter=None, compression=None, **kwargs) -> None: super(FileShareDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.folder_path = folder_path self.file_name = file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end self.format = format self.file_filter = file_filter self.compression = compression diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py index 9f33cea7a261..8b8f238c9534 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py @@ -34,12 +34,14 @@ class FileSystemSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. 
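# Editor's note: with the two new typeProperties above, a FileShareDataset can
# scope a copy to files modified inside a time window. A minimal sketch,
# assuming a pre-existing linked service named 'MyFileServerLinkedService':
from azure.mgmt.datafactory.models import FileShareDataset, LinkedServiceReference

dataset = FileShareDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyFileServerLinkedService'),
    folder_path='share/incoming',
    modified_datetime_start='2019-08-01T00:00:00Z',  # serialized under typeProperties
    modified_datetime_end='2019-08-31T00:00:00Z',
)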
+ :type copy_behavior: object """ _validation = { @@ -52,8 +54,9 @@ class FileSystemSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py index a940e39878f8..24f8623cbb02 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py @@ -34,12 +34,14 @@ class FileSystemSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object """ _validation = { @@ -52,11 +54,12 @@ class FileSystemSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, copy_behavior=None, **kwargs) -> None: - super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.copy_behavior = copy_behavior self.type = 'FileSystemSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py index 1bbf97f1b31d..2986b1848153 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py @@ -27,6 +27,10 @@ class FileSystemSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read @@ -43,6 +47,7 @@ class FileSystemSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py index 6db0072329d4..0598490ca51c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py @@ -27,6 +27,10 @@ class FileSystemSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
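# Editor's note: max_concurrent_connections is threaded through CopySink and
# CopySource into every concrete sink/source in this release; both classes keep
# their 'type' constants server-filled, so only the optional knobs are set. The
# 'MergeFiles' copy behavior is a documented value passed as a plain string now
# that the property is object-typed; all values are illustrative:
from azure.mgmt.datafactory.models import FileSystemSink, FileSystemSource

sink = FileSystemSink(copy_behavior='MergeFiles', max_concurrent_connections=4)
source = FileSystemSource(recursive=True, max_concurrent_connections=8)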
:type type: str :param recursive: If true, files under the folder path will be read @@ -43,11 +47,12 @@ class FileSystemSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, recursive=None, **kwargs) -> None: - super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: + super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.type = 'FileSystemSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py new file mode 100644 index 000000000000..d5213138b96a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FormatReadSettings(Model): + """Format read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(FormatReadSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py new file mode 100644 index 000000000000..326da0277b89 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FormatReadSettings(Model): + """Format read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: + super(FormatReadSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py new file mode 100644 index 000000000000..2100c6055d0d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FormatWriteSettings(Model): + """Format write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(FormatWriteSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py new file mode 100644 index 000000000000..4150eceffc1c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FormatWriteSettings(Model): + """Format write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: + super(FormatWriteSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py new file mode 100644 index 000000000000..e023f9ae91f7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings import StoreReadSettings + + +class FtpReadSettings(StoreReadSettings): + """Ftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Ftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param use_binary_transfer: Specify whether to use binary transfer mode + for FTP stores. 
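# Editor's note: FormatReadSettings/FormatWriteSettings are open base models: a
# required 'type' discriminator plus an additional_properties bag that msrest
# flattens to the top level (the '' key in _attribute_map). A hedged sketch;
# both the subtype name and the pass-through property below are hypothetical,
# not taken from this PR:
from azure.mgmt.datafactory.models import FormatWriteSettings

fmt = FormatWriteSettings(
    type='TextFormatWriteSettings',                # hypothetical subtype name
    additional_properties={'quoteAllText': True},  # hypothetical pass-through
)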
+ :type use_binary_transfer: bool + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(FtpReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.use_binary_transfer = kwargs.get('use_binary_transfer', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py new file mode 100644 index 000000000000..748d306307ac --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class FtpReadSettings(StoreReadSettings): + """Ftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Ftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param use_binary_transfer: Specify whether to use binary transfer mode + for FTP stores. 
+ :type use_binary_transfer: bool + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, use_binary_transfer: bool=None, **kwargs) -> None: + super(FtpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.use_binary_transfer = use_binary_transfer diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py index 03a09f89c13e..e649ca56e37c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py @@ -29,7 +29,7 @@ class FtpServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py index 21fd1168165f..b38ad1c03f46 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py @@ -29,7 +29,7 @@ class FtpServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py new file mode 100644 index 000000000000..5d5e933036df --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
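# Editor's note: FtpReadSettings mirrors the file-server read settings but adds
# an FTP-specific binary-transfer switch; note use_binary_transfer is a plain
# bool here rather than a string expression. Values below are illustrative:
from azure.mgmt.datafactory.models import FtpReadSettings

ftp_settings = FtpReadSettings(
    type='FtpReadSettings',
    recursive=True,
    wildcard_file_name='*.zip',
    use_binary_transfer=True,
)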
+# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class FtpServerLocation(DatasetLocation): + """The location of ftp server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FtpServerLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py new file mode 100644 index 000000000000..ac296bcfca31 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class FtpServerLocation(DatasetLocation): + """The location of ftp server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(FtpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py new file mode 100644 index 000000000000..c460dd95c380 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py @@ -0,0 +1,119 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class GoogleAdWordsLinkedService(LinkedService): + """Google AdWords service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_customer_id: Required. The Client customer ID of the AdWords + account that you want to fetch report data for. + :type client_customer_id: object + :param developer_token: Required. The developer token associated with the + manager account that you use to grant access to the AdWords API. + :type developer_token: ~azure.mgmt.datafactory.models.SecretBase + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. Possible values include: 'ServiceAuthentication', + 'UserAuthentication' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType + :param refresh_token: The refresh token obtained from Google for + authorizing access to AdWords for UserAuthentication. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id of the google application used to acquire + the refresh token. 
+ :type client_id: ~azure.mgmt.datafactory.models.SecretBase + :param client_secret: The client secret of the google application used to + acquire the refresh token. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param email: The service account email ID that is used for + ServiceAuthentication and can only be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to + authenticate the service account email address and can only be used on + self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_customer_id': {'required': True}, + 'developer_token': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, + 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleAdWordsLinkedService, self).__init__(**kwargs) + self.client_customer_id = kwargs.get('client_customer_id', None) + self.developer_token = kwargs.get('developer_token', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.refresh_token = kwargs.get('refresh_token', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.email = kwargs.get('email', None) + self.key_file_path = kwargs.get('key_file_path', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.encrypted_credential = 
kwargs.get('encrypted_credential', None) + self.type = 'GoogleAdWords' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py new file mode 100644 index 000000000000..dfb3bc07e69f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py @@ -0,0 +1,119 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class GoogleAdWordsLinkedService(LinkedService): + """Google AdWords service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_customer_id: Required. The Client customer ID of the AdWords + account that you want to fetch report data for. + :type client_customer_id: object + :param developer_token: Required. The developer token associated with the + manager account that you use to grant access to the AdWords API. + :type developer_token: ~azure.mgmt.datafactory.models.SecretBase + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. Possible values include: 'ServiceAuthentication', + 'UserAuthentication' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType + :param refresh_token: The refresh token obtained from Google for + authorizing access to AdWords for UserAuthentication. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id of the google application used to acquire + the refresh token. + :type client_id: ~azure.mgmt.datafactory.models.SecretBase + :param client_secret: The client secret of the google application used to + acquire the refresh token. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param email: The service account email ID that is used for + ServiceAuthentication and can only be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to + authenticate the service account email address and can only be used on + self-hosted IR. 
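# Editor's note: a minimal sketch of the new GoogleAdWords linked service using
# the UserAuthentication path (refresh token plus client id/secret); the
# ServiceAuthentication fields (email, key_file_path, trusted_cert_path) are
# only honored on a self-hosted IR. SecureString stands in for any SecretBase,
# and every value below is a placeholder:
from azure.mgmt.datafactory.models import GoogleAdWordsLinkedService, SecureString

ads_linked_service = GoogleAdWordsLinkedService(
    client_customer_id='123-456-7890',
    developer_token=SecureString(value='<developer-token>'),
    authentication_type='UserAuthentication',
    refresh_token=SecureString(value='<refresh-token>'),
    client_id=SecureString(value='<client-id>'),
    client_secret=SecureString(value='<client-secret>'),
)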
+ :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_customer_id': {'required': True}, + 'developer_token': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, + 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, client_customer_id, developer_token, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None: + super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.client_customer_id = client_customer_id + self.developer_token = developer_token + self.authentication_type = authentication_type + self.refresh_token = refresh_token + self.client_id = client_id + self.client_secret = client_secret + self.email = email + self.key_file_path = key_file_path + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.encrypted_credential = encrypted_credential + self.type = 'GoogleAdWords' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py new file mode 100644 index 000000000000..92b901b774ed --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class GoogleAdWordsObjectDataset(Dataset): + """Google AdWords service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleAdWordsObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'GoogleAdWordsObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py new file mode 100644 index 000000000000..e1272f978b8e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class GoogleAdWordsObjectDataset(Dataset): + """Google AdWords service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
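Note: a quick construction sketch for the new GoogleAdWordsObjectDataset, not part of the diff; the linked service name is an assumed placeholder:

from azure.mgmt.datafactory.models import (
    GoogleAdWordsObjectDataset, LinkedServiceReference)

# linked_service_name is the only required type-specific argument.
ads_ds = GoogleAdWordsObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='GoogleAdWordsLS'),
    table_name='CAMPAIGN_PERFORMANCE_REPORT')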
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'GoogleAdWordsObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py new file mode 100644 index 000000000000..8699057abe09 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class GoogleAdWordsSource(CopySource): + """A copy activity Google AdWords service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleAdWordsSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'GoogleAdWordsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py new file mode 100644 index 000000000000..995d5324670b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class GoogleAdWordsSource(CopySource): + """A copy activity Google AdWords service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
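Note: a hedged sketch of the new GoogleAdWordsSource inside a copy activity. The dataset names and the BlobSink pairing are illustrative, not prescribed by this diff:

from azure.mgmt.datafactory.models import (
    BlobSink, CopyActivity, DatasetReference, GoogleAdWordsSource)

copy = CopyActivity(
    name='CopyAdWordsReport',
    inputs=[DatasetReference(reference_name='AdWordsCampaigns')],
    outputs=[DatasetReference(reference_name='StagingBlob')],
    source=GoogleAdWordsSource(
        query='SELECT CampaignId, Impressions FROM CAMPAIGN_PERFORMANCE_REPORT',
        max_concurrent_connections=2),
    sink=BlobSink())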
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'GoogleAdWordsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py index c9fa8239b452..45a535b95d43 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py @@ -29,7 +29,7 @@ class GoogleBigQueryLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py index a8582aca98b5..146674a85531 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py @@ -29,7 +29,7 @@ class GoogleBigQueryLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py index 5750875dc3a0..920489742bbf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py @@ -43,9 +43,15 @@ class GoogleBigQueryObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + database + table properties instead. 
:type table_name: object + :param table: The table name of the Google BigQuery. Type: string (or + Expression with resultType string). + :type table: object + :param dataset: The database name of the Google BigQuery. Type: string (or + Expression with resultType string). + :type dataset: object """ _validation = { @@ -64,9 +70,13 @@ class GoogleBigQueryObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, } def __init__(self, **kwargs): super(GoogleBigQueryObjectDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.dataset = kwargs.get('dataset', None) self.type = 'GoogleBigQueryObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py index 625cd068b731..205819f8eeef 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py @@ -43,9 +43,15 @@ class GoogleBigQueryObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + database + table properties instead. :type table_name: object + :param table: The table name of the Google BigQuery. Type: string (or + Expression with resultType string). + :type table: object + :param dataset: The database name of the Google BigQuery. Type: string (or + Expression with resultType string). 
+ :type dataset: object """ _validation = { @@ -64,9 +70,13 @@ class GoogleBigQueryObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, dataset=None, **kwargs) -> None: super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.dataset = dataset self.type = 'GoogleBigQueryObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py index c0598d88a6ed..3a28d2563a8b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py @@ -27,6 +27,10 @@ class GoogleBigQuerySource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class GoogleBigQuerySource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py index eb5727bd43a5..49364b4d0e3f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py @@ -27,6 +27,10 @@ class GoogleBigQuerySource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
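Note: table_name is being retired on GoogleBigQueryObjectDataset in favor of the new dataset + table pair. A migration sketch with illustrative names:

from azure.mgmt.datafactory.models import (
    GoogleBigQueryObjectDataset, LinkedServiceReference)

# Before: table_name='analytics.page_views'
# After: split into the BigQuery dataset (database) and table parts.
bq_ds = GoogleBigQueryObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='GoogleBigQueryLS'),
    dataset='analytics',
    table='page_views')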
:type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class GoogleBigQuerySource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'GoogleBigQuerySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py index d3de7ccab502..57913f779ca1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py @@ -29,7 +29,7 @@ class GreenplumLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py index 886d38718ecd..bd707a5e85c9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py @@ -29,7 +29,7 @@ class GreenplumLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py index a463ff2c3482..086f12419f4a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py @@ -27,6 +27,10 @@ class GreenplumSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
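Note: max_concurrent_connections follows the same pattern on every copy source and sink in this change. Because it is typed as object, it should accept either a literal integer or an ADF expression; a small sketch with illustrative values:

from azure.mgmt.datafactory.models import GoogleBigQuerySource

# Literal value:
src = GoogleBigQuerySource(
    query='SELECT * FROM analytics.page_views',
    max_concurrent_connections=4)

# Or, presumably, an expression resolved at run time:
src.max_concurrent_connections = {
    'value': '@pipeline().parameters.maxConnections',
    'type': 'Expression'}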
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class GreenplumSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py index 6a373bf9d6ae..8b789deb43da 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py @@ -27,6 +27,10 @@ class GreenplumSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class GreenplumSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'GreenplumSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py index fa4a066f11a9..eb0ea08ee544 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py @@ -43,9 +43,15 @@ class GreenplumTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. 
:type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of Greenplum. Type: string (or Expression + with resultType string). + :type table: object + :param greenplum_table_dataset_schema: The schema name of Greenplum. Type: + string (or Expression with resultType string). + :type greenplum_table_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class GreenplumTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'greenplum_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(GreenplumTableDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.greenplum_table_dataset_schema = kwargs.get('greenplum_table_dataset_schema', None) self.type = 'GreenplumTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py index 7c698db22339..7f37fff9108d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py @@ -43,9 +43,15 @@ class GreenplumTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of Greenplum. Type: string (or Expression + with resultType string). + :type table: object + :param greenplum_table_dataset_schema: The schema name of Greenplum. Type: + string (or Expression with resultType string). 
+ :type greenplum_table_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class GreenplumTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'greenplum_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, greenplum_table_dataset_schema=None, **kwargs) -> None: super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.greenplum_table_dataset_schema = greenplum_table_dataset_schema self.type = 'GreenplumTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py index 4d7f3bf5ccb6..b6affd5caa0d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py @@ -29,7 +29,7 @@ class HBaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py index 7963b3fc643c..a8823e2e8937 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py @@ -29,7 +29,7 @@ class HBaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py index cc2c4fd1a843..eb6e3f1789bb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py @@ -27,6 +27,10 @@ class HBaseSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
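Note: as with the BigQuery dataset, GreenplumTableDataset retires table_name in favor of a schema + table pair. The Python parameter is named greenplum_table_dataset_schema (serialized as typeProperties.schema) so it does not collide with the base Dataset.schema property, which holds the physical column schema. A sketch with illustrative names:

from azure.mgmt.datafactory.models import (
    GreenplumTableDataset, LinkedServiceReference)

gp_ds = GreenplumTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='GreenplumLS'),
    greenplum_table_dataset_schema='public',  # typeProperties.schema on the wire
    table='orders')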
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class HBaseSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py index c17d8cf07003..b2680e95c212 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py @@ -27,6 +27,10 @@ class HBaseSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class HBaseSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'HBaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py index b18a138a855e..810525342d82 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py @@ -29,7 +29,7 @@ class HDInsightLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - 
Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -55,6 +55,10 @@ class HDInsightLinkedService(LinkedService): :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. :type is_esp_enabled: object + :param file_system: Specify the FileSystem if the main storage for the + HDInsight is ADLS Gen2. Type: string (or Expression with resultType + string). + :type file_system: object """ _validation = { @@ -76,6 +80,7 @@ class HDInsightLinkedService(LinkedService): 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, + 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, } def __init__(self, **kwargs): @@ -87,4 +92,5 @@ def __init__(self, **kwargs): self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.is_esp_enabled = kwargs.get('is_esp_enabled', None) + self.file_system = kwargs.get('file_system', None) self.type = 'HDInsight' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py index 769cf031a403..5c384f7d6288 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py @@ -29,7 +29,7 @@ class HDInsightLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -55,6 +55,10 @@ class HDInsightLinkedService(LinkedService): :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. :type is_esp_enabled: object + :param file_system: Specify the FileSystem if the main storage for the + HDInsight is ADLS Gen2. Type: string (or Expression with resultType + string). 
+ :type file_system: object """ _validation = { @@ -76,9 +80,10 @@ class HDInsightLinkedService(LinkedService): 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, + 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, } - def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, linked_service_name=None, hcatalog_linked_service_name=None, encrypted_credential=None, is_esp_enabled=None, **kwargs) -> None: + def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, linked_service_name=None, hcatalog_linked_service_name=None, encrypted_credential=None, is_esp_enabled=None, file_system=None, **kwargs) -> None: super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.cluster_uri = cluster_uri self.user_name = user_name @@ -87,4 +92,5 @@ def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, self.hcatalog_linked_service_name = hcatalog_linked_service_name self.encrypted_credential = encrypted_credential self.is_esp_enabled = is_esp_enabled + self.file_system = file_system self.type = 'HDInsight' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py index bd84aabc5012..d386aac9d9aa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py @@ -29,7 +29,7 @@ class HDInsightOnDemandLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -136,6 +136,14 @@ class HDInsightOnDemandLinkedService(LinkedService): cluster once it's up. Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] + :param virtual_network_id: The ARM resource ID for the vNet to which the + cluster should be joined after creation. Type: string (or Expression with + resultType string). + :type virtual_network_id: object + :param subnet_name: The ARM resource ID for the subnet in the vNet. If + virtualNetworkId was specified, then this property is required. Type: + string (or Expression with resultType string). 
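Note: the new file_system property applies only when the cluster's primary storage is ADLS Gen2. A hedged sketch; the cluster URI and linked service name are illustrative:

from azure.mgmt.datafactory.models import (
    HDInsightLinkedService, LinkedServiceReference)

hdi_ls = HDInsightLinkedService(
    cluster_uri='https://mycluster.azurehdinsight.net',
    linked_service_name=LinkedServiceReference(reference_name='AdlsGen2Storage'),
    file_system='clusterfs')  # the ADLS Gen2 filesystem backing the cluster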
+ :type subnet_name: object """ _validation = { @@ -187,6 +195,8 @@ class HDInsightOnDemandLinkedService(LinkedService): 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, + 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, + 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, } def __init__(self, **kwargs): @@ -222,4 +232,6 @@ def __init__(self, **kwargs): self.data_node_size = kwargs.get('data_node_size', None) self.zookeeper_node_size = kwargs.get('zookeeper_node_size', None) self.script_actions = kwargs.get('script_actions', None) + self.virtual_network_id = kwargs.get('virtual_network_id', None) + self.subnet_name = kwargs.get('subnet_name', None) self.type = 'HDInsightOnDemand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py index 5566a022bda2..178585c9b51d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py @@ -29,7 +29,7 @@ class HDInsightOnDemandLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -136,6 +136,14 @@ class HDInsightOnDemandLinkedService(LinkedService): cluster once it's up. Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] + :param virtual_network_id: The ARM resource ID for the vNet to which the + cluster should be joined after creation. Type: string (or Expression with + resultType string). + :type virtual_network_id: object + :param subnet_name: The ARM resource ID for the subnet in the vNet. If + virtualNetworkId was specified, then this property is required. Type: + string (or Expression with resultType string). 
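Note: both new properties take ARM resource IDs, and subnet_name is required whenever virtual_network_id is set. A sketch with the minimum required arguments plus the new pair; all IDs are illustrative placeholders:

from azure.mgmt.datafactory.models import (
    HDInsightOnDemandLinkedService, LinkedServiceReference)

vnet_id = ('/subscriptions/<subscription-id>/resourceGroups/my-rg'
           '/providers/Microsoft.Network/virtualNetworks/my-vnet')

on_demand = HDInsightOnDemandLinkedService(
    cluster_size=4,
    time_to_live='00:30:00',
    version='3.6',
    linked_service_name=LinkedServiceReference(reference_name='StorageLS'),
    host_subscription_id='<subscription-id>',
    tenant='<tenant-id>',
    cluster_resource_group='my-rg',
    virtual_network_id=vnet_id,
    subnet_name=vnet_id + '/subnets/default')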
+ :type subnet_name: object """ _validation = { @@ -187,9 +195,11 @@ class HDInsightOnDemandLinkedService(LinkedService): 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, + 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, + 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, } - def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None, script_actions=None, **kwargs) -> None: + def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None, script_actions=None, virtual_network_id=None, subnet_name=None, **kwargs) -> None: super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.cluster_size = cluster_size self.time_to_live = time_to_live @@ -222,4 +232,6 @@ def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, self.data_node_size = data_node_size self.zookeeper_node_size = zookeeper_node_size self.script_actions = script_actions + self.virtual_network_id = virtual_network_id + self.subnet_name = subnet_name self.type = 'HDInsightOnDemand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py index ab26ae10fe8c..b527f05a7e2f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py @@ -29,7 +29,7 @@ class HdfsLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that 
can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py index 3b854d945e27..e004701e1da0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py @@ -29,7 +29,7 @@ class HdfsLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py new file mode 100644 index 000000000000..a8f5d1ba332c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HdfsLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py new file mode 100644 index 000000000000..2e07575bef0f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(HdfsLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py new file mode 100644 index 000000000000..ec4b98c50385 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings import StoreReadSettings + + +class HdfsReadSettings(StoreReadSettings): + """HDFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. 
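Note: unlike the polymorphic linked service and dataset models above, the DatasetLocation subclasses in this version require the type discriminator to be passed explicitly. A sketch; the paths and the 'HdfsLocation' type string follow the REST model name and are otherwise illustrative:

from azure.mgmt.datafactory.models import HdfsLocation

location = HdfsLocation(
    type='HdfsLocation',       # not auto-filled by the constructor here
    folder_path='/data/incoming',
    file_name='events.csv')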
+ :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param distcp_settings: Specifies Distcp-related settings. + :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, **kwargs): + super(HdfsReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.distcp_settings = kwargs.get('distcp_settings', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py new file mode 100644 index 000000000000..c37a045ec93c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class HdfsReadSettings(StoreReadSettings): + """HDFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
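A sketch of the new HdfsReadSettings model with wildcard filtering and a modified-datetime window; the 'HdfsReadSettings' type string and the datetime format are assumptions based on the docstrings above:

    from azure.mgmt.datafactory.models import HdfsReadSettings

    read_settings = HdfsReadSettings(
        type='HdfsReadSettings',                 # assumed type value
        recursive=True,                          # walk sub-folders (default)
        wildcard_folder_path='logs/2019/*',      # HDFS wildcardFolderPath
        wildcard_file_name='*.csv',              # HDFS wildcardFileName
        modified_datetime_start='2019-01-01T00:00:00Z',
        modified_datetime_end='2019-07-01T00:00:00Z',
        max_concurrent_connections=4)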
+ :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param distcp_settings: Specifies Distcp-related settings. + :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, **kwargs) -> None: + super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.distcp_settings = distcp_settings diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py index 1322a0e68cea..be50590f6c32 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py @@ -27,6 +27,10 @@ class HdfsSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str :param recursive: If true, files under the folder path will be read @@ -45,6 +49,7 @@ class HdfsSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py index 34b194f92d64..3c60cab46289 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py @@ -27,6 +27,10 @@ class HdfsSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read @@ -45,13 +49,14 @@ class HdfsSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, recursive=None, distcp_settings=None, **kwargs) -> None: - super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, distcp_settings=None, **kwargs) -> None: + super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.distcp_settings = distcp_settings self.type = 'HdfsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py index 57b40c30304a..c54c1393d56e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py @@ -29,7 +29,7 @@ class HiveLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. 
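The max_concurrent_connections parameter added across copy sources in this release follows the same pattern everywhere; a sketch on HdfsSource, where the DistcpSettings field names (resource_manager_endpoint, temp_script_path) are assumptions taken from the pre-existing model rather than from this diff:

    from azure.mgmt.datafactory.models import DistcpSettings, HdfsSource

    source = HdfsSource(
        recursive=True,
        max_concurrent_connections=8,   # new in 0.8.0: throttle reads
        distcp_settings=DistcpSettings(
            resource_manager_endpoint='http://namenode:8088',  # hypothetical
            temp_script_path='/tmp/distcp'))                   # hypothetical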
:type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py index 2f742d72594c..611d30ecb781 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py @@ -29,7 +29,7 @@ class HiveLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py index 7dc4fd367f8a..07b6f2b54901 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py @@ -43,9 +43,15 @@ class HiveObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Hive. Type: string (or Expression with + resultType string). + :type table: object + :param hive_object_dataset_schema: The schema name of the Hive. Type: + string (or Expression with resultType string). + :type hive_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class HiveObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'hive_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(HiveObjectDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.hive_object_dataset_schema = kwargs.get('hive_object_dataset_schema', None) self.type = 'HiveObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py index c007333721be..69384bdfa99a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py @@ -43,9 +43,15 @@ class HiveObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. 
:type table_name: object + :param table: The table name of the Hive. Type: string (or Expression with + resultType string). + :type table: object + :param hive_object_dataset_schema: The schema name of the Hive. Type: + string (or Expression with resultType string). + :type hive_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class HiveObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'hive_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, hive_object_dataset_schema=None, **kwargs) -> None: super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.hive_object_dataset_schema = hive_object_dataset_schema self.type = 'HiveObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py index ad7cd5dc5a8a..3af88c3280e3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py @@ -27,6 +27,10 @@ class HiveSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class HiveSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py index 7dc54994b25a..6c09191b8c1b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py @@ -27,6 +27,10 @@ class HiveSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
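Since table_name is being retired in favor of the split schema + table properties, a sketch of the new dataset shape (the linked-service name is hypothetical; LinkedServiceReference is assumed from the existing SDK surface):

    from azure.mgmt.datafactory.models import (
        HiveObjectDataset, LinkedServiceReference)

    ds = HiveObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='HiveLS'),
        hive_object_dataset_schema='default',   # maps to typeProperties.schema
        table='web_logs')                       # maps to typeProperties.table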
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class HiveSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'HiveSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py index 86d6a072925e..6232bc45fee4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py @@ -29,7 +29,7 @@ class HttpLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py index bd4f03006513..7f70adb08425 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py @@ -29,7 +29,7 @@ class HttpLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py new file mode 100644 index 000000000000..a7c175da3489 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings import StoreReadSettings + + +class HttpReadSettings(StoreReadSettings): + """Http read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param request_timeout: Specifies the timeout for a HTTP client to get + HTTP response from HTTP server.
+ :type request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HttpReadSettings, self).__init__(**kwargs) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.request_timeout = kwargs.get('request_timeout', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py new file mode 100644 index 000000000000..7cea9207c996 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class HttpReadSettings(StoreReadSettings): + """Http read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param request_timeout: Specifies the timeout for a HTTP client to get + HTTP response from HTTP server.
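A sketch of the new HttpReadSettings model configured for a POST call, matching the py3 signature below; the 'HttpReadSettings' type string and the header/timeout formats are assumptions:

    from azure.mgmt.datafactory.models import HttpReadSettings

    settings = HttpReadSettings(
        type='HttpReadSettings',                         # assumed type value
        request_method='POST',
        request_body='{"from": "2019-01-01"}',           # sent with the POST
        additional_headers='Content-Type: application/json',
        request_timeout='00:02:00')                      # HTTP client timeout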
+ :type request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, request_timeout=None, **kwargs) -> None: + super(HttpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.request_timeout = request_timeout diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py new file mode 100644 index 000000000000..94106fae9d15 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class HttpServerLocation(DatasetLocation): + """The location of http server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param relative_url: Specify the relativeUrl of http server. 
Type: string + (or Expression with resultType string) + :type relative_url: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HttpServerLocation, self).__init__(**kwargs) + self.relative_url = kwargs.get('relative_url', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py new file mode 100644 index 000000000000..c52c53dcf357 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class HttpServerLocation(DatasetLocation): + """The location of http server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param relative_url: Specify the relativeUrl of http server. 
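A sketch of HttpServerLocation, which extends DatasetLocation with a relative_url (serialized as relativeUrl and resolved against the linked service's base URL); the type value and URL are illustrative:

    from azure.mgmt.datafactory.models import HttpServerLocation

    location = HttpServerLocation(
        type='HttpServerLocation',       # assumed discriminator value
        relative_url='api/v1/export')    # appended to the linked service URL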
Type: string + (or Expression with resultType string) + :type relative_url: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, relative_url=None, **kwargs) -> None: + super(HttpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.relative_url = relative_url diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py index 8c4a6ef6b8d7..ae131aa16c8c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py @@ -27,6 +27,10 @@ class HttpSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param http_request_timeout: Specifies the timeout for a HTTP client to @@ -45,6 +49,7 @@ class HttpSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py index 78bfe7216da6..df339fc3aef7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py @@ -27,6 +27,10 @@ class HttpSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str :param http_request_timeout: Specifies the timeout for a HTTP client to @@ -45,11 +49,12 @@ class HttpSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, http_request_timeout=None, **kwargs) -> None: - super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, http_request_timeout=None, **kwargs) -> None: + super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.http_request_timeout = http_request_timeout self.type = 'HttpSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py index 08af04633c12..3d0d6cb3a6f4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py @@ -29,7 +29,7 @@ class HubspotLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py index 93f66cd8e17b..272d613e9cd1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py @@ -29,7 +29,7 @@ class HubspotLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py index bca6b525860c..b4b4c618c33e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py @@ -27,6 +27,10 @@ class HubspotSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class HubspotSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py index cfc2d2d815b5..a29811342ce0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py @@ -27,6 +27,10 @@ class HubspotSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class HubspotSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'HubspotSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py index fdc471ea225f..a704852652db 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py @@ -29,7 +29,7 @@ class ImpalaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - 
Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py index 9d79f13b9708..55b2e0c861d7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py @@ -29,7 +29,7 @@ class ImpalaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py index d9bf591d8021..8faee4f09240 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py @@ -43,9 +43,15 @@ class ImpalaObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Impala. Type: string (or Expression + with resultType string). + :type table: object + :param impala_object_dataset_schema: The schema name of the Impala. Type: + string (or Expression with resultType string). + :type impala_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class ImpalaObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'impala_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(ImpalaObjectDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.impala_object_dataset_schema = kwargs.get('impala_object_dataset_schema', None) self.type = 'ImpalaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py index d103603b2586..5652b5c9e4b0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py @@ -43,9 +43,15 @@ class ImpalaObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. 
Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Impala. Type: string (or Expression + with resultType string). + :type table: object + :param impala_object_dataset_schema: The schema name of the Impala. Type: + string (or Expression with resultType string). + :type impala_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class ImpalaObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'impala_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, impala_object_dataset_schema=None, **kwargs) -> None: super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.impala_object_dataset_schema = impala_object_dataset_schema self.type = 'ImpalaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py index dec8e843d0c6..9e27dbdb6266 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py @@ -27,6 +27,10 @@ class ImpalaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class ImpalaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py index 5bdb3391c2fc..f7dc4016d020 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py @@ -27,6 +27,10 @@ class ImpalaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class ImpalaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'ImpalaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py new file mode 100644 index 000000000000..2a58e7a0f7d3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class InformixLinkedService(LinkedService): + """Informix linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. 
+ :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Informix as ODBC data store. Possible values are: Anonymous and Basic. + Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Informix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py new file mode 100644 index 000000000000..03aadada664d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class InformixLinkedService(LinkedService): + """Informix linked service. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Informix as ODBC data store. Possible values are: Anonymous and Basic. + Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
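A sketch of the new InformixLinkedService; the ODBC-style connection string is purely illustrative, and SecureString is assumed as the SecretBase implementation for inline secrets:

    from azure.mgmt.datafactory.models import (
        InformixLinkedService, SecureString)

    ls = InformixLinkedService(
        # non-access-credential portion of the connection string (hypothetical)
        connection_string='Driver={Informix};Host=ifx01;Service=9088',
        authentication_type='Basic',
        user_name='informix',
        password=SecureString(value='<password>'))  # placeholder secret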
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Informix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py new file mode 100644 index 000000000000..c511f4ecc174 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class InformixSink(CopySink): + """A copy activity Informix sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'InformixSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py new file mode 100644 index 000000000000..b0681ec0d423 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class InformixSink(CopySink): + """A copy activity Informix sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'InformixSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py new file mode 100644 index 000000000000..6cab908c7014 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class InformixSource(CopySource): + """A copy activity source for Informix. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'InformixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py new file mode 100644 index 000000000000..ed8fb0221239 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class InformixSource(CopySource): + """A copy activity source for Informix. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'InformixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py new file mode 100644 index 000000000000..8b7364bff652 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class InformixTableDataset(Dataset): + """The Informix table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Informix table name. Type: string (or Expression + with resultType string). 
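InformixSource and InformixSink are thin CopySource/CopySink wrappers, so they slot straight into a copy activity. A hedged sketch of pairing them, assuming CopyActivity and DatasetReference keep their usual shape in this package; all names are illustrative:

from azure.mgmt.datafactory.models import (
    CopyActivity, DatasetReference, InformixSink, InformixSource)

copy = CopyActivity(
    name='CopyInformixToInformix',
    inputs=[DatasetReference(reference_name='InformixInput')],
    outputs=[DatasetReference(reference_name='InformixStaging')],
    # query and max_concurrent_connections are expression-friendly objects
    source=InformixSource(query='SELECT * FROM customer',
                          max_concurrent_connections=4),
    sink=InformixSink(pre_copy_script='DELETE FROM staging_customer'),
)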
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'InformixTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py new file mode 100644 index 000000000000..05c458e797b1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class InformixTableDataset(Dataset): + """The Informix table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Informix table name. Type: string (or Expression + with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'InformixTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py new file mode 100644 index 000000000000..ebc0e9b38d6f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeDataProxyProperties(Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. + :type connect_via: ~azure.mgmt.datafactory.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: + ~azure.mgmt.datafactory.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. 
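For the dataset itself only the linked service reference is required, and table_name accepts either a literal string or an Expression. A short sketch, assuming a linked service named 'InformixLS' is already defined in the factory:

from azure.mgmt.datafactory.models import (
    InformixTableDataset, LinkedServiceReference)

informix_ds = InformixTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='InformixLS'),
    table_name='customer',  # or an Expression with resultType string
)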
+ :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = kwargs.get('connect_via', None) + self.staging_linked_service = kwargs.get('staging_linked_service', None) + self.path = kwargs.get('path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py new file mode 100644 index 000000000000..532b774cad3d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeDataProxyProperties(Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. + :type connect_via: ~azure.mgmt.datafactory.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: + ~azure.mgmt.datafactory.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. + :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, *, connect_via=None, staging_linked_service=None, path: str=None, **kwargs) -> None: + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = connect_via + self.staging_linked_service = staging_linked_service + self.path = path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py index e1a091166529..293f071aa0b3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py @@ -30,6 +30,10 @@ class IntegrationRuntimeSsisProperties(Model): a managed dedicated integration runtime. :type custom_setup_script_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties + :param data_proxy_properties: Data proxy properties for a managed + dedicated integration runtime. + :type data_proxy_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties :param edition: The edition for the SSIS Integration Runtime. 
Possible values include: 'Standard', 'Enterprise' :type edition: str or @@ -41,6 +45,7 @@ class IntegrationRuntimeSsisProperties(Model): 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, 'license_type': {'key': 'licenseType', 'type': 'str'}, 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, + 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, 'edition': {'key': 'edition', 'type': 'str'}, } @@ -50,4 +55,5 @@ def __init__(self, **kwargs): self.catalog_info = kwargs.get('catalog_info', None) self.license_type = kwargs.get('license_type', None) self.custom_setup_script_properties = kwargs.get('custom_setup_script_properties', None) + self.data_proxy_properties = kwargs.get('data_proxy_properties', None) self.edition = kwargs.get('edition', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py index eb70dd23ddb7..f75775e29a7f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py @@ -30,6 +30,10 @@ class IntegrationRuntimeSsisProperties(Model): a managed dedicated integration runtime. :type custom_setup_script_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties + :param data_proxy_properties: Data proxy properties for a managed + dedicated integration runtime. + :type data_proxy_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties :param edition: The edition for the SSIS Integration Runtime. 
Possible values include: 'Standard', 'Enterprise' :type edition: str or @@ -41,13 +45,15 @@ class IntegrationRuntimeSsisProperties(Model): 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, 'license_type': {'key': 'licenseType', 'type': 'str'}, 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, + 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, 'edition': {'key': 'edition', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, edition=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, data_proxy_properties=None, edition=None, **kwargs) -> None: super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) self.additional_properties = additional_properties self.catalog_info = catalog_info self.license_type = license_type self.custom_setup_script_properties = custom_setup_script_properties + self.data_proxy_properties = data_proxy_properties self.edition = edition diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py index d8b9a62fc878..517cdd63caa5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py @@ -29,7 +29,7 @@ class JiraLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py index 69606ee7cfcf..82dc8d578da3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py @@ -29,7 +29,7 @@ class JiraLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py index 7bb6a8649b8f..709da0ce1205 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py @@ -27,6 +27,10 @@ class JiraSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
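The new data_proxy_properties hangs off IntegrationRuntimeSsisProperties and points at a self-hosted runtime plus a staging linked service through EntityReference. A sketch under the assumption that EntityReference takes a type discriminator and a reference_name, as the other reference models in this package do:

from azure.mgmt.datafactory.models import (
    EntityReference, IntegrationRuntimeDataProxyProperties,
    IntegrationRuntimeSsisProperties)

ssis_props = IntegrationRuntimeSsisProperties(
    data_proxy_properties=IntegrationRuntimeDataProxyProperties(
        connect_via=EntityReference(
            type='IntegrationRuntimeReference',  # assumed discriminator value
            reference_name='SelfHostedIR'),
        staging_linked_service=EntityReference(
            type='LinkedServiceReference',       # assumed discriminator value
            reference_name='StagingBlobLS'),
        path='ssis-staging',  # Blob folder that holds the staged data
    ),
)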
+ :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class JiraSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py index 1a19ed99c55a..c958c8351bb3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py @@ -27,6 +27,10 @@ class JiraSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class JiraSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'JiraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset.py new file mode 100644 index 000000000000..c1cee8f00b8d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class JsonDataset(Dataset): + """Json dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the json data storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param encoding_name: The code page name of the preferred encoding. If not + specified, the default value is UTF-8, unless BOM denotes another Unicode + encoding. Refer to the name column of the table in the following link to + set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param compression: The data compression method used for the json dataset. 
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(JsonDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.compression = kwargs.get('compression', None) + self.type = 'Json' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset_py3.py new file mode 100644 index 000000000000..564fe3bebf6b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class JsonDataset(Dataset): + """Json dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + :param location: Required. The location of the json data storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param encoding_name: The code page name of the preferred encoding. If not + specified, the default value is UTF-8, unless BOM denotes another Unicode + encoding. Refer to the name column of the table in the following link to + set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param compression: The data compression method used for the json dataset. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, encoding_name=None, compression=None, **kwargs) -> None: + super(JsonDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.encoding_name = encoding_name + self.compression = compression + self.type = 'Json' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py index 736f9500018f..80f4ff0aaf8b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py @@ -30,10 +30,8 @@ class JsonFormat(DatasetStorageFormat): :type type: str :param file_pattern: File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is - 'setOfObjects'. It is case-sensitive. Possible values include: - 'setOfObjects', 'arrayOfObjects' - :type file_pattern: str or - ~azure.mgmt.datafactory.models.JsonFormatFilePattern + 'setOfObjects'. It is case-sensitive. + :type file_pattern: object :param nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). 
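Two things are worth pulling out of the hunks above: JsonFormat.file_pattern is loosened from a string enum to a bare object so it can carry an Expression, and the new JsonDataset makes location its only extra required property. A sketch of the dataset, assuming AzureBlobStorageLocation is among the DatasetLocation subclasses shipped in this release:

from azure.mgmt.datafactory.models import (
    AzureBlobStorageLocation, JsonDataset, LinkedServiceReference)

json_ds = JsonDataset(
    linked_service_name=LinkedServiceReference(reference_name='BlobLS'),
    location=AzureBlobStorageLocation(folder_path='raw/events',
                                      file_name='events.json'),
    encoding_name='UTF-8',  # optional; UTF-8 is already the default
)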
@@ -67,7 +65,7 @@ class JsonFormat(DatasetStorageFormat): 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'encoding_name': {'key': 'encodingName', 'type': 'object'}, 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py index a9a7f20ea103..2fdb44cc3b7f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py @@ -30,10 +30,8 @@ class JsonFormat(DatasetStorageFormat): :type type: str :param file_pattern: File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is - 'setOfObjects'. It is case-sensitive. Possible values include: - 'setOfObjects', 'arrayOfObjects' - :type file_pattern: str or - ~azure.mgmt.datafactory.models.JsonFormatFilePattern + 'setOfObjects'. It is case-sensitive. + :type file_pattern: object :param nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). @@ -67,7 +65,7 @@ class JsonFormat(DatasetStorageFormat): 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'encoding_name': {'key': 'encodingName', 'type': 'object'}, 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink.py new file mode 100644 index 000000000000..829344338672 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class JsonSink(CopySink): + """A copy activity Json sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. 
Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Json store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Json format settings. + :type format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, + } + + def __init__(self, **kwargs): + super(JsonSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'JsonSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink_py3.py new file mode 100644 index 000000000000..3212bb4784d8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class JsonSink(CopySink): + """A copy activity Json sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. 
Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Json store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Json format settings. + :type format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'JsonSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source.py new file mode 100644 index 000000000000..a3349ee1d39e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class JsonSource(CopySource): + """A copy activity Json source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
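JsonSink splits its configuration between store settings (where the files land) and format settings (how the JSON objects are laid out). A sketch using JsonWriteSettings, which is defined further down in this diff; the discriminator value passed as type is an assumption:

from azure.mgmt.datafactory.models import JsonSink, JsonWriteSettings

json_sink = JsonSink(
    format_settings=JsonWriteSettings(
        type='JsonWriteSettings',      # required write-setting type; value assumed
        file_pattern='arrayOfObjects',
    ),
)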
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Json store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, **kwargs): + super(JsonSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'JsonSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source_py3.py new file mode 100644 index 000000000000..7e5b73662801 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class JsonSource(CopySource): + """A copy activity Json source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Json store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'JsonSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings.py new file mode 100644 index 000000000000..287da9805170 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_settings import FormatWriteSettings + + +class JsonWriteSettings(FormatWriteSettings): + """Json write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param file_pattern: File pattern of JSON. This setting controls the way a + collection of JSON objects will be treated. The default value is + 'setOfObjects'. It is case-sensitive. 
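On the read side only store settings are added on top of the common retry knobs. A sketch, assuming this release ships an AzureBlobStorageReadSettings variant of StoreReadSettings with the usual recursive/wildcard options; if it does not, any StoreReadSettings instance fits here:

from azure.mgmt.datafactory.models import (
    AzureBlobStorageReadSettings, JsonSource)

json_source = JsonSource(
    store_settings=AzureBlobStorageReadSettings(
        recursive=True,
        wildcard_file_name='*.json',
    ),
    max_concurrent_connections=2,
)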
Possible values include: + 'setOfObjects', 'arrayOfObjects' + :type file_pattern: str or + ~azure.mgmt.datafactory.models.JsonWriteFilePattern + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(JsonWriteSettings, self).__init__(**kwargs) + self.file_pattern = kwargs.get('file_pattern', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings_py3.py new file mode 100644 index 000000000000..f78f57eb1187 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_settings_py3 import FormatWriteSettings + + +class JsonWriteSettings(FormatWriteSettings): + """Json write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param file_pattern: File pattern of JSON. This setting controls the way a + collection of JSON objects will be treated. The default value is + 'setOfObjects'. It is case-sensitive. Possible values include: + 'setOfObjects', 'arrayOfObjects' + :type file_pattern: str or + ~azure.mgmt.datafactory.models.JsonWriteFilePattern + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, file_pattern=None, **kwargs) -> None: + super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.file_pattern = file_pattern diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py index 62f172fded76..2778a33fbb5a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py @@ -18,31 +18,39 @@ class LinkedService(Model): resource. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AzureFunctionLinkedService, ResponsysLinkedService, + sub-classes are: AzureFunctionLinkedService, + AzureDataExplorerLinkedService, SapTableLinkedService, + GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, + DynamicsAXLinkedService, ResponsysLinkedService, AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, XeroLinkedService, SquareLinkedService, SparkLinkedService, ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, - MarketoLinkedService, MariaDBLinkedService, MagentoLinkedService, - JiraLinkedService, ImpalaLinkedService, HubspotLinkedService, - HiveLinkedService, HBaseLinkedService, GreenplumLinkedService, - GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService, - CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService, - AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService, - SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService, - AzureSearchLinkedService, CustomDataSourceLinkedService, - AmazonRedshiftLinkedService, AmazonS3LinkedService, SapEccLinkedService, - SapCloudForCustomerLinkedService, SalesforceLinkedService, - AzureDataLakeStoreLinkedService, MongoDbLinkedService, - CassandraLinkedService, WebLinkedService, ODataLinkedService, - HdfsLinkedService, OdbcLinkedService, AzureMLLinkedService, - TeradataLinkedService, Db2LinkedService, SybaseLinkedService, - PostgreSqlLinkedService, MySqlLinkedService, AzureMySqlLinkedService, - OracleLinkedService, FileServerLinkedService, HDInsightLinkedService, - DynamicsLinkedService, CosmosDbLinkedService, AzureKeyVaultLinkedService, - AzureBatchLinkedService, AzureSqlDatabaseLinkedService, + MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, + MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, + HubspotLinkedService, HiveLinkedService, HBaseLinkedService, + GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, + DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService, + AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, + SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, + HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, + AmazonRedshiftLinkedService, AmazonS3LinkedService, + RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, + SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, + SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, + AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, + MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, + WebLinkedService, ODataLinkedService, HdfsLinkedService, + MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService, + AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, + SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, + AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, + HDInsightLinkedService, CommonDataServiceForAppsLinkedService, + DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, + AzureKeyVaultLinkedService, AzureBatchLinkedService, + AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, SqlServerLinkedService, AzureSqlDWLinkedService, 
AzureTableStorageLinkedService, AzureBlobStorageLinkedService, AzureStorageLinkedService @@ -61,7 +69,7 @@ class LinkedService(Model): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -81,7 +89,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 
'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, **kwargs): 
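As a minimal usage sketch of what the regenerated `_subtype_map` above buys callers (assuming azure-mgmt-datafactory 0.8.0 is installed; the payload values are illustrative, not taken from this change), msrest resolves the concrete `LinkedService` subclass from the `type` discriminator registered in `_subtype_map`::

    # Sketch: polymorphic resolution via the _subtype_map shown above.
    # AzureMariaDBLinkedService is one of the subtypes newly registered here;
    # the connection string is a placeholder for illustration only.
    from azure.mgmt.datafactory.models import (
        AzureMariaDBLinkedService,
        LinkedService,
    )

    payload = {
        'type': 'AzureMariaDB',  # discriminator looked up in _subtype_map['type']
        'typeProperties': {'connectionString': 'Server=example;Database=db'},
    }

    # Model.deserialize consults _subtype_map['type'] and instantiates the
    # subclass registered for 'AzureMariaDB' rather than the base class.
    svc = LinkedService.deserialize(payload)
    assert isinstance(svc, AzureMariaDBLinkedService)

The same lookup drives every other entry added in this hunk (RestService, SapOpenHub, AzureSqlMI, and so on), which is why new connector types only require new map entries rather than changes to the deserializer itself.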
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py index ff4bb8c7605d..2b3e475c3075 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py @@ -18,31 +18,39 @@ class LinkedService(Model): resource. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureFunctionLinkedService, ResponsysLinkedService, + sub-classes are: AzureFunctionLinkedService, + AzureDataExplorerLinkedService, SapTableLinkedService, + GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, + DynamicsAXLinkedService, ResponsysLinkedService, AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, XeroLinkedService, SquareLinkedService, SparkLinkedService, ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, - MarketoLinkedService, MariaDBLinkedService, MagentoLinkedService, - JiraLinkedService, ImpalaLinkedService, HubspotLinkedService, - HiveLinkedService, HBaseLinkedService, GreenplumLinkedService, - GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService, - CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService, - AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService, - SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService, - AzureSearchLinkedService, CustomDataSourceLinkedService, - AmazonRedshiftLinkedService, AmazonS3LinkedService, SapEccLinkedService, - SapCloudForCustomerLinkedService, SalesforceLinkedService, - AzureDataLakeStoreLinkedService, MongoDbLinkedService, - CassandraLinkedService, WebLinkedService, ODataLinkedService, - HdfsLinkedService, OdbcLinkedService, AzureMLLinkedService, - TeradataLinkedService, Db2LinkedService, SybaseLinkedService, - PostgreSqlLinkedService, MySqlLinkedService, AzureMySqlLinkedService, - OracleLinkedService, FileServerLinkedService, HDInsightLinkedService, - DynamicsLinkedService, CosmosDbLinkedService, AzureKeyVaultLinkedService, - AzureBatchLinkedService, AzureSqlDatabaseLinkedService, + MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, + MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, + HubspotLinkedService, HiveLinkedService, HBaseLinkedService, + GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, + DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService, + AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, + SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, + HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, + AmazonRedshiftLinkedService, AmazonS3LinkedService, + RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, + SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, + SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, + AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, + MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, + WebLinkedService, ODataLinkedService, HdfsLinkedService, + MicrosoftAccessLinkedService, 
InformixLinkedService, OdbcLinkedService, + AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, + SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, + AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, + HDInsightLinkedService, CommonDataServiceForAppsLinkedService, + DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, + AzureKeyVaultLinkedService, AzureBatchLinkedService, + AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, SqlServerLinkedService, AzureSqlDWLinkedService, AzureTableStorageLinkedService, AzureBlobStorageLinkedService, AzureStorageLinkedService @@ -61,7 +69,7 @@ class LinkedService(Model): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -81,7 +89,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 
'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 
'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py index 5fb8974f28db..9d65437b5daa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py @@ -29,7 +29,7 @@ class MagentoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py index 420656103983..74de1573118b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py @@ -29,7 +29,7 @@ class MagentoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py index 679ba2a0669e..df49fe63a544 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py @@ -27,6 +27,10 @@ class MagentoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,6 +46,7 @@ class MagentoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py index a01cf80a969a..15efcc12a054 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py @@ -27,6 +27,10 @@ class MagentoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class MagentoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'MagentoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py index 0a98a04138dc..3bbe048d4877 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py @@ -29,7 +29,7 @@ class MariaDBLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py index ef1114660ad7..475284d56038 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py @@ -29,7 +29,7 @@ class MariaDBLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py index 96b7116cd3ac..a744c1c5ff8f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py @@ -27,6 +27,10 @@ class MariaDBSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class MariaDBSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py index 1dbb6f327d04..472877b8f0bb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py @@ -27,6 +27,10 @@ class MariaDBSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class MariaDBSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'MariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py index 432676824a75..2a9e76446122 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py @@ -29,7 +29,7 @@ class MarketoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py index b4e360931809..dc326f24acd5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py @@ -29,7 +29,7 @@ class MarketoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py index 4867951baae7..6d2061ef0dee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py @@ -27,6 +27,10 @@ class MarketoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class MarketoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py index 52c16eae0437..573dc0439754 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py @@ -27,6 +27,10 @@ class MarketoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class MarketoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'MarketoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py new file mode 100644 index 000000000000..b53164f6266b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class MicrosoftAccessLinkedService(LinkedService): + """Microsoft Access linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Microsoft Access as ODBC data store. Possible values are: Anonymous and + Basic. Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'MicrosoftAccess' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py new file mode 100644 index 000000000000..c9f79c24adf3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MicrosoftAccessLinkedService(LinkedService): + """Microsoft Access linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. 
+ :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Microsoft Access as ODBC data store. Possible values are: Anonymous and + Basic. Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'MicrosoftAccess' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py new file mode 100644 index 000000000000..53406fa25022 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class MicrosoftAccessSink(CopySink): + """A copy activity Microsoft Access sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'MicrosoftAccessSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py new file mode 100644 index 000000000000..700db840c03d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class MicrosoftAccessSink(CopySink): + """A copy activity Microsoft Access sink. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'MicrosoftAccessSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py new file mode 100644 index 000000000000..73cd3a64184c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class MicrosoftAccessSource(CopySource): + """A copy activity source for Microsoft Access. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MicrosoftAccessSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py new file mode 100644 index 000000000000..1cccd82c8b19 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MicrosoftAccessSource(CopySource): + """A copy activity source for Microsoft Access. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. 
Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MicrosoftAccessSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py new file mode 100644 index 000000000000..f312dae024f5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class MicrosoftAccessTableDataset(Dataset): + """The Microsoft Access table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Microsoft Access table name. Type: string (or + Expression with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'MicrosoftAccessTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py new file mode 100644 index 000000000000..3fad904ef58b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MicrosoftAccessTableDataset(Dataset): + """The Microsoft Access table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Microsoft Access table name. Type: string (or + Expression with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MicrosoftAccessTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py new file mode 100644 index 000000000000..a2d2127d1397 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class MongoDbCursorMethodsProperties(Model): + """Cursor methods for MongoDB query. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection. + :type additional_properties: dict[str, object] + :param project: Specifies the fields to return in the documents that match + the query filter. To return all fields in the matching documents, omit + this parameter. Type: string (or Expression with resultType string). + :type project: object + :param sort: Specifies the order in which the query returns matching + documents. Type: string (or Expression with resultType string). + :type sort: object + :param skip: Specifies how many documents are skipped and where MongoDB + begins returning results. This approach may be useful in implementing + paginated results. Type: integer (or Expression with resultType integer). + :type skip: object + :param limit: Specifies the maximum number of documents the server + returns. limit() is analogous to the LIMIT statement in a SQL database. + Type: integer (or Expression with resultType integer).
+ :type limit: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'project': {'key': 'project', 'type': 'object'}, + 'sort': {'key': 'sort', 'type': 'object'}, + 'skip': {'key': 'skip', 'type': 'object'}, + 'limit': {'key': 'limit', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.project = kwargs.get('project', None) + self.sort = kwargs.get('sort', None) + self.skip = kwargs.get('skip', None) + self.limit = kwargs.get('limit', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py new file mode 100644 index 000000000000..e1e3f50d1539 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class MongoDbCursorMethodsProperties(Model): + """Cursor methods for MongoDB query. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection. + :type additional_properties: dict[str, object] + :param project: Specifies the fields to return in the documents that match + the query filter. To return all fields in the matching documents, omit + this parameter. Type: string (or Expression with resultType string). + :type project: object + :param sort: Specifies the order in which the query returns matching + documents. Type: string (or Expression with resultType string). + :type sort: object + :param skip: Specifies how many documents are skipped and where MongoDB + begins returning results. This approach may be useful in implementing + paginated results. Type: integer (or Expression with resultType integer). + :type skip: object + :param limit: Specifies the maximum number of documents the server + returns. limit() is analogous to the LIMIT statement in a SQL database. + Type: integer (or Expression with resultType integer).
+ :type limit: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'project': {'key': 'project', 'type': 'object'}, + 'sort': {'key': 'sort', 'type': 'object'}, + 'skip': {'key': 'skip', 'type': 'object'}, + 'limit': {'key': 'limit', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, project=None, sort=None, skip=None, limit=None, **kwargs) -> None: + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.project = project + self.sort = sort + self.skip = skip + self.limit = limit diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py index 49d53510f7fd..76d162b0ff70 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py @@ -29,7 +29,7 @@ class MongoDbLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py index c1d96a5465b9..95308b6ea8f0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py @@ -29,7 +29,7 @@ class MongoDbLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py index b9f0be6b97d3..3da4b931f5e5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py @@ -27,6 +27,10 @@ class MongoDbSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Should be a SQL-92 query expression. 
Type: @@ -42,6 +46,7 @@ class MongoDbSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py index b4f01d8d7ffb..ab3e5b6e0cc9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py @@ -27,6 +27,10 @@ class MongoDbSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Should be a SQL-92 query expression. Type: @@ -42,11 +46,12 @@ class MongoDbSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'MongoDbSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py new file mode 100644 index 000000000000..17089373d4c5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class MongoDbV2CollectionDataset(Dataset): + """The MongoDB database dataset. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the MongoDB database. + Type: string (or Expression with resultType string). + :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbV2CollectionDataset, self).__init__(**kwargs) + self.collection = kwargs.get('collection', None) + self.type = 'MongoDbV2Collection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py new file mode 100644 index 000000000000..ad1e5c538645 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MongoDbV2CollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the MongoDB database. + Type: string (or Expression with resultType string). + :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.collection = collection + self.type = 'MongoDbV2Collection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py new file mode 100644 index 000000000000..bb29fc767420 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
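A minimal sketch of how the new MongoDbV2CollectionDataset above might be created through the management client; the linked service, dataset, factory, and resource group names are hypothetical placeholders, not part of this diff.

    from azure.mgmt.datafactory.models import (
        DatasetResource, LinkedServiceReference, MongoDbV2CollectionDataset)

    # Reference an existing MongoDbV2 linked service by name (hypothetical name).
    mongo_ls = LinkedServiceReference(reference_name='MongoDbV2LinkedService')

    # 'collection' is required; it takes a literal string or an ADF expression.
    dataset = MongoDbV2CollectionDataset(
        linked_service_name=mongo_ls,
        collection='orders')

    # With an authenticated DataFactoryManagementClient named adf_client:
    # adf_client.datasets.create_or_update(
    #     'my-resource-group', 'my-factory', 'MongoOrders',
    #     DatasetResource(properties=dataset))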
+# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class MongoDbV2LinkedService(LinkedService): + """Linked service for MongoDB data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection. + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The MongoDB connection string. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the MongoDB database that you want + to access. Type: string (or Expression with resultType string). + :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbV2LinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.database = kwargs.get('database', None) + self.type = 'MongoDbV2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py new file mode 100644 index 000000000000..d1388ce797a5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MongoDbV2LinkedService(LinkedService): + """Linked service for MongoDB data source. + + All required parameters must be populated in order to send to Azure.
+ + :param additional_properties: Unmatched properties from the message are + deserialized to this collection. + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The MongoDB connection string. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the MongoDB database that you want + to access. Type: string (or Expression with resultType string). + :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.database = database + self.type = 'MongoDbV2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py new file mode 100644 index 000000000000..e951674a8e22 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class MongoDbV2Source(CopySource): + """A copy activity source for a MongoDB database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection. + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count.
Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param filter: Specifies a selection filter using query operators. To return + all documents in a collection, omit this parameter or pass an empty + document ({}). Type: string (or Expression with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for MongoDB query. + :type cursor_methods: + ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each + batch of the response from the MongoDB instance. In most cases, modifying + the batch size will not affect the user or the application. This property's + main purpose is to avoid hitting the limitation of response size. Type: + integer (or Expression with resultType integer). + :type batch_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbV2Source, self).__init__(**kwargs) + self.filter = kwargs.get('filter', None) + self.cursor_methods = kwargs.get('cursor_methods', None) + self.batch_size = kwargs.get('batch_size', None) + self.type = 'MongoDbV2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py new file mode 100644 index 000000000000..9b8eec114a06 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MongoDbV2Source(CopySource): + """A copy activity source for a MongoDB database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection. + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer).
+ :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param filter: Specifies a selection filter using query operators. To return + all documents in a collection, omit this parameter or pass an empty + document ({}). Type: string (or Expression with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for MongoDB query. + :type cursor_methods: + ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each + batch of the response from the MongoDB instance. In most cases, modifying + the batch size will not affect the user or the application. This property's + main purpose is to avoid hitting the limitation of response size. Type: + integer (or Expression with resultType integer). + :type batch_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None: + super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size + self.type = 'MongoDbV2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py index dd279ab6baa3..1be28aa1b6ab 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py @@ -34,6 +34,9 @@ class MultiplePipelineTrigger(Trigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started.
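A short sketch tying together the new MongoDbV2Source and MongoDbCursorMethodsProperties models defined above; the filter, projection, and sort documents are hypothetical examples, passed as MongoDB JSON fragments per the docstrings.

    from azure.mgmt.datafactory.models import (
        MongoDbCursorMethodsProperties, MongoDbV2Source)

    # Shape the cursor: project two fields, sort by price descending,
    # skip the first 10 matches, and cap the result at 100 documents.
    cursor = MongoDbCursorMethodsProperties(
        project='{"name": 1, "price": 1, "_id": 0}',
        sort='{"price": -1}',
        skip=10,
        limit=100)

    # The source itself: filter with MongoDB query operators, fetch in
    # batches of 500, and allow at most 4 concurrent connections.
    source = MongoDbV2Source(
        filter='{"category": "books"}',
        cursor_methods=cursor,
        batch_size=500,
        max_concurrent_connections=4)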
@@ -50,6 +53,7 @@ class MultiplePipelineTrigger(Trigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py index 3400431e49e2..206ab74ef419 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py @@ -34,6 +34,9 @@ class MultiplePipelineTrigger(Trigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -50,6 +53,7 @@ class MultiplePipelineTrigger(Trigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, } @@ -58,7 +62,7 @@ class MultiplePipelineTrigger(Trigger): 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} } - def __init__(self, *, additional_properties=None, description: str=None, pipelines=None, **kwargs) -> None: - super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + def __init__(self, *, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.pipelines = pipelines self.type = 'MultiplePipelineTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py index 542fb13b7a37..ec85b0136714 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py @@ -29,7 +29,7 @@ class MySqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py index cd87d5e7e3b5..b8038df22fd6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py @@ -29,7 +29,7 @@ class MySqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py new file mode 100644 index 000000000000..c2b0b66eabb1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class MySqlSource(CopySource): + """A copy activity source for MySQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MySqlSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py new file mode 100644 index 000000000000..3a0315d83979 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MySqlSource(CopySource): + """A copy activity source for MySQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py new file mode 100644 index 000000000000..3bb1584975d5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class MySqlTableDataset(Dataset): + """The MySQL table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The MySQL table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MySqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'MySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py new file mode 100644 index 000000000000..33263561dfde --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MySqlTableDataset(Dataset): + """The MySQL table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The MySQL table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py index 319a68efddc5..5d94bdecaf62 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py @@ -29,7 +29,7 @@ class NetezzaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py index 6c3b607d60dc..2fcc288fd5b7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py @@ -29,7 +29,7 @@ class NetezzaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py new file mode 100644 index 000000000000..b6c1ca9ba5da --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
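A brief sketch pairing the new MySqlTableDataset and MySqlSource models added above; the linked service name, table, and query are hypothetical placeholders.

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, MySqlSource, MySqlTableDataset)

    # A dataset that points at a table through an existing MySQL linked service.
    customers = MySqlTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='MySqlLinkedService'),
        table_name='customers')

    # A copy-activity source that reads with an explicit query, limited to
    # two concurrent connections against the source store.
    source = MySqlSource(
        query='SELECT id, name FROM customers',
        max_concurrent_connections=2)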
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class NetezzaPartitionSettings(Model): + """The settings that will be leveraged for Netezza source partitioning. + + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(NetezzaPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py new file mode 100644 index 000000000000..9f071eae60ff --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class NetezzaPartitionSettings(Model): + """The settings that will be leveraged for Netezza source partitioning. + + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). 
+ :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: + super(NetezzaPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py index 0c08b1440614..3c66032bf48d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py @@ -27,11 +27,24 @@ class NetezzaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object + :param partition_option: The partition mechanism that will be used for + Netezza read in parallel. Possible values include: 'None', 'DataSlice', + 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.NetezzaPartitionOption + :param partition_settings: The settings that will be leveraged for Netezza + source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.NetezzaPartitionSettings """ _validation = { @@ -42,11 +55,16 @@ class NetezzaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, } def __init__(self, **kwargs): super(NetezzaSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) self.type = 'NetezzaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py index 2b4c38f708ee..f5dcc07e63d8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py @@ -27,11 +27,24 @@ class NetezzaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object + :param partition_option: The partition mechanism that will be used for + Netezza read in parallel. Possible values include: 'None', 'DataSlice', + 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.NetezzaPartitionOption + :param partition_settings: The settings that will be leveraged for Netezza + source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.NetezzaPartitionSettings """ _validation = { @@ -42,11 +55,16 @@ class NetezzaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query + self.partition_option = partition_option + self.partition_settings = partition_settings self.type = 'NetezzaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py index cf3b9205846c..b7807273262b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py @@ -43,9 +43,15 @@ class NetezzaTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Netezza. Type: string (or Expression + with resultType string). + :type table: object + :param netezza_table_dataset_schema: The schema name of the Netezza. Type: + string (or Expression with resultType string). 
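# Example: a minimal sketch of the new Netezza parallel-read options, using
# the NetezzaSource and NetezzaPartitionSettings models defined above. The
# column name and bounds are placeholders; 'DynamicRange' is one of the
# documented partition options ('None', 'DataSlice', 'DynamicRange').
from azure.mgmt.datafactory.models import NetezzaPartitionSettings, NetezzaSource

netezza_source = NetezzaSource(
    partition_option='DynamicRange',
    partition_settings=NetezzaPartitionSettings(
        partition_column_name='order_id',   # integer-typed column
        partition_lower_bound='1',
        partition_upper_bound='1000000',
    ),
    max_concurrent_connections=4,
)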
+ :type netezza_table_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class NetezzaTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'netezza_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(NetezzaTableDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.netezza_table_dataset_schema = kwargs.get('netezza_table_dataset_schema', None) self.type = 'NetezzaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py index 39de0032e8c9..29dd448ada75 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py @@ -43,9 +43,15 @@ class NetezzaTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Netezza. Type: string (or Expression + with resultType string). + :type table: object + :param netezza_table_dataset_schema: The schema name of the Netezza. Type: + string (or Expression with resultType string). 
+ :type netezza_table_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class NetezzaTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'netezza_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, netezza_table_dataset_schema=None, **kwargs) -> None: super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.netezza_table_dataset_schema = netezza_table_dataset_schema self.type = 'NetezzaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py index 9a7edca9ddb1..01db8d71e924 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py @@ -29,7 +29,7 @@ class ODataLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -37,7 +37,8 @@ class ODataLinkedService(LinkedService): (or Expression with resultType string). :type url: object :param authentication_type: Type of authentication used to connect to the - OData service. Possible values include: 'Basic', 'Anonymous' + OData service. Possible values include: 'Basic', 'Anonymous', 'Windows', + 'AadServicePrincipal', 'ManagedServiceIdentity' :type authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType :param user_name: User name of the OData service. Type: string (or @@ -45,6 +46,38 @@ class ODataLinkedService(LinkedService): :type user_name: object :param password: Password of the OData service. :type password: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: Specify the tenant information (domain name or tenant ID) + under which your application resides. Type: string (or Expression with + resultType string). + :type tenant: object + :param service_principal_id: Specify the application id of your + application registered in Azure Active Directory. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param aad_resource_id: Specify the resource you are requesting + authorization to use Directory. Type: string (or Expression with + resultType string). + :type aad_resource_id: object + :param aad_service_principal_credential_type: Specify the credential type + (key or cert) is used for service principal. 
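# Example: the retired tableName property versus the new schema/table split
# on NetezzaTableDataset, per the model above. All names are placeholders.
from azure.mgmt.datafactory.models import LinkedServiceReference, NetezzaTableDataset

netezza_ds = NetezzaTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='NetezzaLinkedService'),
    netezza_table_dataset_schema='ADMIN',  # serialized to typeProperties.schema
    table='ORDERS',                        # serialized to typeProperties.table
)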
Possible values include: + 'ServicePrincipalKey', 'ServicePrincipalCert' + :type aad_service_principal_credential_type: str or + ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType + :param service_principal_key: Specify the secret of your application + registered in Azure Active Directory. Type: string (or Expression with + resultType string). + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_embedded_cert: Specify the base64 encoded + certificate of your application registered in Azure Active Directory. + Type: string (or Expression with resultType string). + :type service_principal_embedded_cert: + ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_embedded_cert_password: Specify the password of + your certificate if your certificate has a password and you are using + AadServicePrincipal authentication. Type: string (or Expression with + resultType string). + :type service_principal_embedded_cert_password: + ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -67,6 +100,13 @@ class ODataLinkedService(LinkedService): 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, + 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -76,5 +116,12 @@ def __init__(self, **kwargs): self.authentication_type = kwargs.get('authentication_type', None) self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) + self.tenant = kwargs.get('tenant', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.aad_service_principal_credential_type = kwargs.get('aad_service_principal_credential_type', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.service_principal_embedded_cert = kwargs.get('service_principal_embedded_cert', None) + self.service_principal_embedded_cert_password = kwargs.get('service_principal_embedded_cert_password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'OData' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py index 688bb4e4ffda..fcf2d8bb9819 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py @@ -29,7 +29,7 @@ class ODataLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -37,7 +37,8 @@ class ODataLinkedService(LinkedService): (or Expression with resultType string). :type url: object :param authentication_type: Type of authentication used to connect to the - OData service. Possible values include: 'Basic', 'Anonymous' + OData service. Possible values include: 'Basic', 'Anonymous', 'Windows', + 'AadServicePrincipal', 'ManagedServiceIdentity' :type authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType :param user_name: User name of the OData service. Type: string (or @@ -45,6 +46,38 @@ class ODataLinkedService(LinkedService): :type user_name: object :param password: Password of the OData service. :type password: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: Specify the tenant information (domain name or tenant ID) + under which your application resides. Type: string (or Expression with + resultType string). + :type tenant: object + :param service_principal_id: Specify the application id of your + application registered in Azure Active Directory. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param aad_resource_id: Specify the resource you are requesting + authorization to use Directory. Type: string (or Expression with + resultType string). + :type aad_resource_id: object + :param aad_service_principal_credential_type: Specify the credential type + (key or cert) is used for service principal. Possible values include: + 'ServicePrincipalKey', 'ServicePrincipalCert' + :type aad_service_principal_credential_type: str or + ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType + :param service_principal_key: Specify the secret of your application + registered in Azure Active Directory. Type: string (or Expression with + resultType string). + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_embedded_cert: Specify the base64 encoded + certificate of your application registered in Azure Active Directory. + Type: string (or Expression with resultType string). + :type service_principal_embedded_cert: + ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_embedded_cert_password: Specify the password of + your certificate if your certificate has a password and you are using + AadServicePrincipal authentication. Type: string (or Expression with + resultType string). + :type service_principal_embedded_cert_password: + ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -67,14 +100,28 @@ class ODataLinkedService(LinkedService): 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, + 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, tenant=None, service_principal_id=None, aad_resource_id=None, aad_service_principal_credential_type=None, service_principal_key=None, service_principal_embedded_cert=None, service_principal_embedded_cert_password=None, encrypted_credential=None, **kwargs) -> None: super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.url = url self.authentication_type = authentication_type self.user_name = user_name self.password = password + self.tenant = tenant + self.service_principal_id = service_principal_id + self.aad_resource_id = aad_resource_id + self.aad_service_principal_credential_type = aad_service_principal_credential_type + self.service_principal_key = service_principal_key + self.service_principal_embedded_cert = service_principal_embedded_cert + self.service_principal_embedded_cert_password = service_principal_embedded_cert_password self.encrypted_credential = encrypted_credential self.type = 'OData' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py new file mode 100644 index 000000000000..c70f440ff6cb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ODataSource(CopySource): + """A copy activity source for OData source. 
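# Example: a minimal sketch of the new AadServicePrincipal authentication on
# ODataLinkedService, using a service-principal key. Every identifier below
# is a placeholder; SecureString is one concrete SecretBase implementation.
from azure.mgmt.datafactory.models import ODataLinkedService, SecureString

odata_ls = ODataLinkedService(
    url='https://services.odata.org/V4/Northwind/Northwind.svc',
    authentication_type='AadServicePrincipal',
    tenant='contoso.onmicrosoft.com',
    service_principal_id='00000000-0000-0000-0000-000000000000',
    aad_service_principal_credential_type='ServicePrincipalKey',
    service_principal_key=SecureString(value='<application-secret>'),
)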
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: OData query. For example, "$top=1". Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ODataSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ODataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py new file mode 100644 index 000000000000..83ba9bd7f2af --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ODataSource(CopySource): + """A copy activity source for OData source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: OData query. For example, "$top=1". Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ODataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py index 43559b76e0e0..53d21dee2def 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py @@ -29,7 +29,7 @@ class OdbcLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py index e0147881f3d0..2e376d23c67a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py @@ -29,7 +29,7 @@ class OdbcLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py index 4598952cb21b..ced7e1dbd9e4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py @@ -34,6 +34,10 @@ class OdbcSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. 
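# Example: the new ODataSource with an inline OData query, per the model
# definition above.
from azure.mgmt.datafactory.models import ODataSource

odata_source = ODataSource(query='$top=1', max_concurrent_connections=2)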
Type: @@ -51,6 +55,7 @@ class OdbcSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py index 430329bdf2b9..9a181f8df7e9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py @@ -34,6 +34,10 @@ class OdbcSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: @@ -51,11 +55,12 @@ class OdbcSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, pre_copy_script=None, **kwargs) -> None: - super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'OdbcSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py new file mode 100644 index 000000000000..9761d0c0aeb5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
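# Example: wiring an OdbcSink (with the new maxConcurrentConnections knob)
# into a copy activity, reusing the ODataSource sketch above. The activity
# and dataset reference names are placeholders, and the CopyActivity
# signature is assumed from this SDK generation rather than shown in this
# diff.
from azure.mgmt.datafactory.models import (
    CopyActivity, DatasetReference, ODataSource, OdbcSink)

copy_activity = CopyActivity(
    name='CopyODataToOdbc',
    source=ODataSource(query='$top=100'),
    sink=OdbcSink(pre_copy_script='TRUNCATE TABLE staging_orders',
                  max_concurrent_connections=2),
    inputs=[DatasetReference(reference_name='ODataInputDataset')],
    outputs=[DatasetReference(reference_name='OdbcOutputDataset')],
)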
+# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class OdbcSource(CopySource): + """A copy activity source for ODBC databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OdbcSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'OdbcSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py new file mode 100644 index 000000000000..52b059a8ad91 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class OdbcSource(CopySource): + """A copy activity source for ODBC databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. 
Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'OdbcSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py new file mode 100644 index 000000000000..2f4f4261f4fc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class OdbcTableDataset(Dataset): + """The ODBC table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The ODBC table name. Type: string (or Expression with + resultType string). 
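# Example: the new OdbcSource; the query text is a placeholder.
from azure.mgmt.datafactory.models import OdbcSource

odbc_source = OdbcSource(
    query='SELECT id, name FROM dbo.customers',
    max_concurrent_connections=4,
)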
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OdbcTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'OdbcTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py new file mode 100644 index 000000000000..070ddccd180d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class OdbcTableDataset(Dataset): + """The ODBC table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The ODBC table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(OdbcTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'OdbcTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py new file mode 100644 index 000000000000..baa90666d669 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class Office365Dataset(Dataset): + """The Office365 account. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
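# Example: creating the new OdbcTableDataset through the management client.
# The resource group, factory, and dataset names are placeholders, and the
# assumption that datasets.create_or_update accepts the Dataset model as its
# properties argument follows this SDK generation's samples, not this diff.
from azure.mgmt.datafactory.models import LinkedServiceReference, OdbcTableDataset

odbc_ds = OdbcTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='OdbcLinkedService'),
    table_name='dbo.customers',
)
# adf_client.datasets.create_or_update('myRg', 'myFactory', 'OdbcCustomers', odbc_ds)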
+ :type type: str + :param table_name: Required. Name of the dataset to extract from Office + 365. Type: string (or Expression with resultType string). + :type table_name: object + :param predicate: A predicate expression that can be used to filter the + specific rows to extract from Office 365. Type: string (or Expression with + resultType string). + :type predicate: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Office365Dataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.predicate = kwargs.get('predicate', None) + self.type = 'Office365Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py new file mode 100644 index 000000000000..5517f7daf9e3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class Office365Dataset(Dataset): + """The Office365 account. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. 
If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. Name of the dataset to extract from Office + 365. Type: string (or Expression with resultType string). + :type table_name: object + :param predicate: A predicate expression that can be used to filter the + specific rows to extract from Office 365. Type: string (or Expression with + resultType string). + :type predicate: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, predicate=None, **kwargs) -> None: + super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.predicate = predicate + self.type = 'Office365Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py new file mode 100644 index 000000000000..2dc98897482a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class Office365LinkedService(LinkedService): + """Office365 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
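# Example: the Office365 dataset defined above; table_name is required. The
# dataset name is illustrative of an Office 365 extraction table, and the
# predicate is a placeholder row filter.
from azure.mgmt.datafactory.models import LinkedServiceReference, Office365Dataset

o365_ds = Office365Dataset(
    linked_service_name=LinkedServiceReference(reference_name='Office365LinkedService'),
    table_name='BasicDataSet_v0.Message_v0',  # assumed dataset name
    predicate='CreatedDateTime > 2019-01-01T00:00:00Z',  # placeholder filter
)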
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param office365_tenant_id: Required. Azure tenant ID to which the Office + 365 account belongs. Type: string (or Expression with resultType string). + :type office365_tenant_id: object + :param service_principal_tenant_id: Required. Specify the tenant + information under which your Azure AD web application resides. Type: + string (or Expression with resultType string). + :type service_principal_tenant_id: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'office365_tenant_id': {'required': True}, + 'service_principal_tenant_id': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, + 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Office365LinkedService, self).__init__(**kwargs) + self.office365_tenant_id = kwargs.get('office365_tenant_id', None) + self.service_principal_tenant_id = kwargs.get('service_principal_tenant_id', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Office365' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py new file mode 100644 index 000000000000..5a69c0d895fa --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class Office365LinkedService(LinkedService): + """Office365 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param office365_tenant_id: Required. Azure tenant ID to which the Office + 365 account belongs. Type: string (or Expression with resultType string). + :type office365_tenant_id: object + :param service_principal_tenant_id: Required. Specify the tenant + information under which your Azure AD web application resides. Type: + string (or Expression with resultType string). + :type service_principal_tenant_id: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'office365_tenant_id': {'required': True}, + 'service_principal_tenant_id': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, + 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, office365_tenant_id, service_principal_tenant_id, service_principal_id, service_principal_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: + super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.office365_tenant_id = office365_tenant_id + self.service_principal_tenant_id = service_principal_tenant_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.encrypted_credential = encrypted_credential + self.type = 'Office365' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py new file mode 100644 index 000000000000..de19818aaa7f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class Office365Source(CopySource): + """A copy activity source for an Office365 service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param allowed_groups: The groups containing all the users. Type: array of + strings (or Expression with resultType array of strings). + :type allowed_groups: object + :param user_scope_filter_uri: The user scope URI. Type: string (or + Expression with resultType string). + :type user_scope_filter_uri: object + :param date_filter_column: The column on which to apply the startTime and + endTime filters. Type: string (or Expression with resultType string). + :type date_filter_column: object + :param start_time: Start time of the requested range for this dataset. + Type: string (or Expression with resultType string). + :type start_time: object + :param end_time: End time of the requested range for this dataset. Type: + string (or Expression with resultType string). + :type end_time: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, + 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, + 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, + 'start_time': {'key': 'startTime', 'type': 'object'}, + 'end_time': {'key': 'endTime', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Office365Source, self).__init__(**kwargs) + self.allowed_groups = kwargs.get('allowed_groups', None) + self.user_scope_filter_uri = kwargs.get('user_scope_filter_uri', None) + self.date_filter_column = kwargs.get('date_filter_column', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.type = 'Office365Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py new file mode 100644 index 000000000000..fc2c4b095904 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class Office365Source(CopySource): + """A copy activity source for an Office365 service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count.
Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param allowed_groups: The groups containing all the users. Type: array of + strings (or Expression with resultType array of strings). + :type allowed_groups: object + :param user_scope_filter_uri: The user scope URI. Type: string (or + Expression with resultType string). + :type user_scope_filter_uri: object + :param date_filter_column: The column on which to apply the startTime and + endTime filters. Type: string (or Expression with resultType string). + :type date_filter_column: object + :param start_time: Start time of the requested range for this dataset. + Type: string (or Expression with resultType string). + :type start_time: object + :param end_time: End time of the requested range for this dataset. Type: + string (or Expression with resultType string). + :type end_time: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, + 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, + 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, + 'start_time': {'key': 'startTime', 'type': 'object'}, + 'end_time': {'key': 'endTime', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, allowed_groups=None, user_scope_filter_uri=None, date_filter_column=None, start_time=None, end_time=None, **kwargs) -> None: + super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.allowed_groups = allowed_groups + self.user_scope_filter_uri = user_scope_filter_uri + self.date_filter_column = date_filter_column + self.start_time = start_time + self.end_time = end_time + self.type = 'Office365Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py index 5485151adb1f..19f715dfd9e2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py @@ -29,7 +29,7 @@ class OracleLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required.
Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py index 80b0ed1176ff..a46f0463afb5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py @@ -29,7 +29,7 @@ class OracleLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py new file mode 100644 index 000000000000..b4e9aa1b92f3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OraclePartitionSettings(Model): + """The settings that will be leveraged for Oracle source partitioning. + + :param partition_names: Names of the physical partitions of Oracle table. + :type partition_names: object + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). 
+ :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_names': {'key': 'partitionNames', 'type': 'object'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = kwargs.get('partition_names', None) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py new file mode 100644 index 000000000000..10641aab7f9f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OraclePartitionSettings(Model): + """The settings that will be leveraged for Oracle source partitioning. + + :param partition_names: Names of the physical partitions of Oracle table. + :type partition_names: object + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). 
+ :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_names': {'key': 'partitionNames', 'type': 'object'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, *, partition_names=None, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: + super(OraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = partition_names + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py new file mode 100644 index 000000000000..44ce000868b7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py @@ -0,0 +1,95 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class OracleServiceCloudLinkedService(LinkedService): + """Oracle Service Cloud linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the Oracle Service Cloud instance. + :type host: object + :param username: Required. The user name that you use to access Oracle + Service Cloud server. + :type username: object + :param password: Required. The password corresponding to the user name + that you provided in the username key. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). 
+ :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleServiceCloudLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'OracleServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py new file mode 100644 index 000000000000..8732e2e82ca0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py @@ -0,0 +1,95 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class OracleServiceCloudLinkedService(LinkedService): + """Oracle Service Cloud linked service. + + All required parameters must be populated in order to send to Azure. 
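
Reviewer note, not part of the patch: a minimal, hypothetical sketch of constructing the new OracleServiceCloudLinkedService added above. The host, user name, and secret below are placeholder values, and SecureString is simply one concrete SecretBase implementation from the same models package.

from azure.mgmt.datafactory.models import (
    OracleServiceCloudLinkedService,
    SecureString,
)

# host/username/password are the three required typeProperties of this
# linked service; password must be a SecretBase (SecureString here).
osc_ls = OracleServiceCloudLinkedService(
    host='https://contoso.example.rightnowdemo.com',  # hypothetical instance URL
    username='integration_user',                      # hypothetical user name
    password=SecureString(value='<placeholder-secret>'),
    use_encrypted_endpoints=True,  # optional; the service default is true
)
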
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the Oracle Service Cloud instance. + :type host: object + :param username: Required. The user name that you use to access Oracle + Service Cloud server. + :type username: object + :param password: Required. The password corresponding to the user name + that you provided in the username key. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, username, password, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'OracleServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py new file mode 100644 index 000000000000..35ce3439d8a0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class OracleServiceCloudObjectDataset(Dataset): + """Oracle Service Cloud dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. 
+ :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleServiceCloudObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'OracleServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py new file mode 100644 index 000000000000..a478e1abc828 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class OracleServiceCloudObjectDataset(Dataset): + """Oracle Service Cloud dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'OracleServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py new file mode 100644 index 000000000000..f42291941393 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class OracleServiceCloudSource(CopySource): + """A copy activity Oracle Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). 
+ :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleServiceCloudSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'OracleServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py new file mode 100644 index 000000000000..1fa5d6eb3748 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class OracleServiceCloudSource(CopySource): + """A copy activity Oracle Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'OracleServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py index fa0e11f57553..1f6c747c49db 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py @@ -34,6 +34,10 @@ class OracleSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. Type: string (or Expression @@ -51,6 +55,7 @@ class OracleSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py index a6b666d31ed7..3a571c66732a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py @@ -34,6 +34,10 @@ class OracleSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. 
Type: string (or Expression @@ -51,11 +55,12 @@ class OracleSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, pre_copy_script=None, **kwargs) -> None: - super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'OracleSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py index 3f74cf83ee7a..db436192eca1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py @@ -27,6 +27,10 @@ class OracleSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param oracle_reader_query: Oracle reader query. Type: string (or @@ -36,6 +40,15 @@ class OracleSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param partition_option: The partition mechanism that will be used for + Oracle read in parallel. Possible values include: 'None', + 'PhysicalPartitionsOfTable', 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.OraclePartitionOption + :param partition_settings: The settings that will be leveraged for Oracle + source partitioning. 
+ :type partition_settings: + ~azure.mgmt.datafactory.models.OraclePartitionSettings """ _validation = { @@ -46,13 +59,18 @@ class OracleSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, } def __init__(self, **kwargs): super(OracleSource, self).__init__(**kwargs) self.oracle_reader_query = kwargs.get('oracle_reader_query', None) self.query_timeout = kwargs.get('query_timeout', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py index 89252615e6e5..0a871809896e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py @@ -27,6 +27,10 @@ class OracleSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param oracle_reader_query: Oracle reader query. Type: string (or @@ -36,6 +40,15 @@ class OracleSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param partition_option: The partition mechanism that will be used for + Oracle read in parallel. Possible values include: 'None', + 'PhysicalPartitionsOfTable', 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.OraclePartitionOption + :param partition_settings: The settings that will be leveraged for Oracle + source partitioning. 
+ :type partition_settings: + ~azure.mgmt.datafactory.models.OraclePartitionSettings """ _validation = { @@ -46,13 +59,18 @@ class OracleSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, oracle_reader_query=None, query_timeout=None, **kwargs) -> None: - super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.oracle_reader_query = oracle_reader_query self.query_timeout = query_timeout + self.partition_option = partition_option + self.partition_settings = partition_settings self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py index 4af8faaca8db..c76b5ced3e5c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py @@ -43,15 +43,20 @@ class OracleTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the on-premises Oracle - database. Type: string (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param oracle_table_dataset_schema: The schema name of the on-premises + Oracle database. Type: string (or Expression with resultType string). + :type oracle_table_dataset_schema: object + :param table: The table name of the on-premises Oracle database. Type: + string (or Expression with resultType string). 
+ :type table: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -65,9 +70,13 @@ class OracleTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'oracle_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__(self, **kwargs): super(OracleTableDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.oracle_table_dataset_schema = kwargs.get('oracle_table_dataset_schema', None) + self.table = kwargs.get('table', None) self.type = 'OracleTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py index aaa1291c8f76..b588fbac5244 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py @@ -43,15 +43,20 @@ class OracleTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the on-premises Oracle - database. Type: string (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param oracle_table_dataset_schema: The schema name of the on-premises + Oracle database. Type: string (or Expression with resultType string). + :type oracle_table_dataset_schema: object + :param table: The table name of the on-premises Oracle database. Type: + string (or Expression with resultType string). 
+ :type table: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -65,9 +70,13 @@ class OracleTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'oracle_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, oracle_table_dataset_schema=None, table=None, **kwargs) -> None: super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.oracle_table_dataset_schema = oracle_table_dataset_schema + self.table = table self.type = 'OracleTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py new file mode 100644 index 000000000000..ffaf8e1f6d93 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class ParquetDataset(Dataset): + """Parquet dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
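With ``table_name`` being retired, an ``OracleTableDataset`` would now be addressed through the split schema and table properties, roughly (``'OracleLS'`` is a placeholder linked service name)::

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, OracleTableDataset)

    dataset = OracleTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='OracleLS'),
        oracle_table_dataset_schema='HR',  # serialized as typeProperties.schema
        table='EMPLOYEES')                 # serialized as typeProperties.table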
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the parquet storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression_codec: + :type compression_codec: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ParquetDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.compression_codec = kwargs.get('compression_codec', None) + self.type = 'Parquet' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py new file mode 100644 index 000000000000..4d754450ce15 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ParquetDataset(Dataset): + """Parquet dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the parquet storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression_codec: + :type compression_codec: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression_codec=None, **kwargs) -> None: + super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.compression_codec = compression_codec + self.type = 'Parquet' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py new file mode 100644 index 000000000000..dea3e0f8fc52 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class ParquetSink(CopySink): + """A copy activity Parquet sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
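A rough construction sketch for the new ``ParquetDataset``; this assumes a flat ``DatasetLocation`` in this release where the ``type`` discriminator is set explicitly, and all names are placeholders::

    from azure.mgmt.datafactory.models import (
        DatasetLocation, LinkedServiceReference, ParquetDataset)

    parquet_ds = ParquetDataset(
        linked_service_name=LinkedServiceReference(reference_name='BlobLS'),
        location=DatasetLocation(
            type='AzureBlobStorageLocation',  # assumed location type string
            folder_path='container/raw',
            file_name='data.parquet'),
        compression_codec='snappy')           # optional; untyped in the model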
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + } + + def __init__(self, **kwargs): + super(ParquetSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'ParquetSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py new file mode 100644 index 000000000000..463044fef83f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class ParquetSink(CopySink): + """A copy activity Parquet sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'ParquetSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py new file mode 100644 index 000000000000..ab888c7361a2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ParquetSource(CopySource): + """A copy activity Parquet source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. 
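Wired into a copy activity, the new Parquet source and sink would look roughly like this (the dataset reference names are placeholders)::

    from azure.mgmt.datafactory.models import (
        CopyActivity, DatasetReference, ParquetSink, ParquetSource)

    copy = CopyActivity(
        name='CopyParquet',
        inputs=[DatasetReference(reference_name='ParquetIn')],
        outputs=[DatasetReference(reference_name='ParquetOut')],
        source=ParquetSource(max_concurrent_connections=8),  # new parameter
        sink=ParquetSink(max_concurrent_connections=4))      # new parameter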
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, **kwargs): + super(ParquetSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'ParquetSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py new file mode 100644 index 000000000000..332a7b9b8c5e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ParquetSource(CopySource): + """A copy activity Parquet source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'ParquetSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py index 190fc45985d3..d7ae0bc075e7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py @@ -29,7 +29,7 @@ class PaypalLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py index 832b0dff257b..c11cda7a52f3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py @@ -29,7 +29,7 @@ class PaypalLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py index 5bb73029d10c..94cdbccae6ee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py @@ -27,6 +27,10 @@ class PaypalSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
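The activity sketched above could then be published with the management client, along the lines of the following (credentials, subscription, and resource names are placeholders)::

    from azure.common.credentials import ServicePrincipalCredentials
    from azure.mgmt.datafactory import DataFactoryManagementClient
    from azure.mgmt.datafactory.models import PipelineResource

    credentials = ServicePrincipalCredentials(
        client_id='...', secret='...', tenant='...')  # placeholders
    client = DataFactoryManagementClient(credentials, 'subscription-id')

    # Reuses the `copy` activity from the previous sketch.
    client.pipelines.create_or_update(
        'my-rg', 'my-factory', 'ParquetCopyPipeline',
        PipelineResource(activities=[copy]))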
Type: string (or @@ -42,6 +46,7 @@ class PaypalSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py index 6a9dcce16a2d..05730d0ae067 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py @@ -27,6 +27,10 @@ class PaypalSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class PaypalSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'PaypalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py index b9d16bc32c56..308a8e4cf592 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py @@ -29,7 +29,7 @@ class PhoenixLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
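``max_concurrent_connections`` is added uniformly across the copy sources; on ``PaypalSource``, for example::

    from azure.mgmt.datafactory.models import PaypalSource

    source = PaypalSource(
        query='SELECT * FROM Payment_Experience',  # placeholder query
        max_concurrent_connections=2)              # throttle the source store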
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py index aeb89e4fdd4a..de8210c2cc89 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py @@ -29,7 +29,7 @@ class PhoenixLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py index 2d9cd5dcd581..ccaa2eb49abd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py @@ -43,9 +43,15 @@ class PhoenixObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Phoenix. Type: string (or Expression + with resultType string). + :type table: object + :param phoenix_object_dataset_schema: The schema name of the Phoenix. + Type: string (or Expression with resultType string). + :type phoenix_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class PhoenixObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'phoenix_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(PhoenixObjectDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.phoenix_object_dataset_schema = kwargs.get('phoenix_object_dataset_schema', None) self.type = 'PhoenixObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py index 32c6e5f9836f..cda4dc41dc22 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py @@ -43,9 +43,15 @@ class PhoenixObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. 
:type table_name: object + :param table: The table name of the Phoenix. Type: string (or Expression + with resultType string). + :type table: object + :param phoenix_object_dataset_schema: The schema name of the Phoenix. + Type: string (or Expression with resultType string). + :type phoenix_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class PhoenixObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'phoenix_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, phoenix_object_dataset_schema=None, **kwargs) -> None: super(PhoenixObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.phoenix_object_dataset_schema = phoenix_object_dataset_schema self.type = 'PhoenixObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py index daad6ec41c31..30171c6177ff 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py @@ -27,6 +27,10 @@ class PhoenixSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class PhoenixSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py index 619e7220dd09..1384f59e1aa4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py @@ -27,6 +27,10 @@ class PhoenixSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
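Like the other object datasets in this change, ``PhoenixObjectDataset`` now splits the retired ``table_name`` into schema and table, e.g. (linked service name is a placeholder)::

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, PhoenixObjectDataset)

    dataset = PhoenixObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='PhoenixLS'),
        phoenix_object_dataset_schema='default',  # typeProperties.schema
        table='web_logs')                         # typeProperties.table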
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class PhoenixSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'PhoenixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py index 3ae4beb48ff1..a2407bd9835f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py @@ -23,6 +23,12 @@ class PipelineRun(Model): :type additional_properties: dict[str, object] :ivar run_id: Identifier of a run. :vartype run_id: str + :ivar run_group_id: Identifier that correlates all the recovery runs of a + pipeline run. + :vartype run_group_id: str + :ivar is_latest: Indicates if the recovered pipeline run is the latest in + its group. + :vartype is_latest: bool :ivar pipeline_name: The pipeline name. 
:vartype pipeline_name: str :ivar parameters: The full or partial list of parameter name, value pair @@ -47,6 +53,8 @@ class PipelineRun(Model): _validation = { 'run_id': {'readonly': True}, + 'run_group_id': {'readonly': True}, + 'is_latest': {'readonly': True}, 'pipeline_name': {'readonly': True}, 'parameters': {'readonly': True}, 'invoked_by': {'readonly': True}, @@ -61,6 +69,8 @@ class PipelineRun(Model): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'run_id': {'key': 'runId', 'type': 'str'}, + 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, + 'is_latest': {'key': 'isLatest', 'type': 'bool'}, 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{str}'}, 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, @@ -76,6 +86,8 @@ def __init__(self, **kwargs): super(PipelineRun, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.run_id = None + self.run_group_id = None + self.is_latest = None self.pipeline_name = None self.parameters = None self.invoked_by = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py index aed5dd0466d2..33e0f23f24ac 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py @@ -23,6 +23,12 @@ class PipelineRun(Model): :type additional_properties: dict[str, object] :ivar run_id: Identifier of a run. :vartype run_id: str + :ivar run_group_id: Identifier that correlates all the recovery runs of a + pipeline run. + :vartype run_group_id: str + :ivar is_latest: Indicates if the recovered pipeline run is the latest in + its group. + :vartype is_latest: bool :ivar pipeline_name: The pipeline name. 
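The new read-only ``run_group_id`` and ``is_latest`` fields surface on fetched runs; a retrieval sketch, reusing the ``client`` from the pipeline sketch above (the run id is a placeholder)::

    run_id = '00000000-0000-0000-0000-000000000000'  # placeholder
    run = client.pipeline_runs.get('my-rg', 'my-factory', run_id)

    # run_group_id correlates a run with its recovery reruns;
    # is_latest marks the most recent run within that group.
    print(run.run_group_id, run.is_latest)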
:vartype pipeline_name: str :ivar parameters: The full or partial list of parameter name, value pair @@ -47,6 +53,8 @@ class PipelineRun(Model): _validation = { 'run_id': {'readonly': True}, + 'run_group_id': {'readonly': True}, + 'is_latest': {'readonly': True}, 'pipeline_name': {'readonly': True}, 'parameters': {'readonly': True}, 'invoked_by': {'readonly': True}, @@ -61,6 +69,8 @@ class PipelineRun(Model): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'run_id': {'key': 'runId', 'type': 'str'}, + 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, + 'is_latest': {'key': 'isLatest', 'type': 'bool'}, 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{str}'}, 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, @@ -76,6 +86,8 @@ def __init__(self, *, additional_properties=None, **kwargs) -> None: super(PipelineRun, self).__init__(**kwargs) self.additional_properties = additional_properties self.run_id = None + self.run_group_id = None + self.is_latest = None self.pipeline_name = None self.parameters = None self.invoked_by = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py index af16c6c89cd2..f8ce5bd0803e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py @@ -29,7 +29,7 @@ class PostgreSqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py index 5e7e674a2447..0221aa620064 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py @@ -29,7 +29,7 @@ class PostgreSqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py new file mode 100644 index 000000000000..51dd25b25c60 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class PostgreSqlSource(CopySource): + """A copy activity source for PostgreSQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PostgreSqlSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'PostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py new file mode 100644 index 000000000000..8aa12e4bdf8d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class PostgreSqlSource(CopySource): + """A copy activity source for PostgreSQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'PostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py new file mode 100644 index 000000000000..031a2479815b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class PostgreSqlTableDataset(Dataset): + """The PostgreSQL table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
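The new ``PostgreSqlSource`` follows the same copy-source shape, e.g.::

    from azure.mgmt.datafactory.models import PostgreSqlSource

    source = PostgreSqlSource(
        query='SELECT id, total FROM public.orders',  # placeholder query
        max_concurrent_connections=2)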
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The PostgreSQL table name. Type: string (or Expression with + resultType string). + :type table: object + :param postgre_sql_table_dataset_schema: The PostgreSQL schema name. Type: + string (or Expression with resultType string). + :type postgre_sql_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PostgreSqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.postgre_sql_table_dataset_schema = kwargs.get('postgre_sql_table_dataset_schema', None) + self.type = 'PostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py new file mode 100644 index 000000000000..8adb7bd409ea --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class PostgreSqlTableDataset(Dataset): + """The PostgreSQL table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The PostgreSQL table name. Type: string (or Expression with + resultType string). + :type table: object + :param postgre_sql_table_dataset_schema: The PostgreSQL schema name. Type: + string (or Expression with resultType string). + :type postgre_sql_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, postgre_sql_table_dataset_schema=None, **kwargs) -> None: + super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.postgre_sql_table_dataset_schema = postgre_sql_table_dataset_schema + self.type = 'PostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py index abf4adde8515..21f18f07b262 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py @@ -29,7 +29,7 @@ class PrestoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
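And the matching ``PostgreSqlTableDataset``, using the schema/table split rather than the retired ``table_name`` (linked service name is a placeholder)::

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, PostgreSqlTableDataset)

    pg_ds = PostgreSqlTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='PostgresLS'),
        postgre_sql_table_dataset_schema='public',  # typeProperties.schema
        table='orders')                             # typeProperties.table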
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py index fe178f62df4f..75ab99d5a58f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py @@ -29,7 +29,7 @@ class PrestoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py index 35ceaa1389a7..eb80e1a97750 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py @@ -43,9 +43,15 @@ class PrestoObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Presto. Type: string (or Expression + with resultType string). + :type table: object + :param presto_object_dataset_schema: The schema name of the Presto. Type: + string (or Expression with resultType string). + :type presto_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class PrestoObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'presto_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(PrestoObjectDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.presto_object_dataset_schema = kwargs.get('presto_object_dataset_schema', None) self.type = 'PrestoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py index 193004e2c381..e3bd2f7e36aa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py @@ -43,9 +43,15 @@ class PrestoObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Presto. 
Type: string (or Expression + with resultType string). + :type table: object + :param presto_object_dataset_schema: The schema name of the Presto. Type: + string (or Expression with resultType string). + :type presto_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class PrestoObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'presto_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, presto_object_dataset_schema=None, **kwargs) -> None: super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.presto_object_dataset_schema = presto_object_dataset_schema self.type = 'PrestoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py index 333a4e6dca9e..9b7274011265 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py @@ -27,6 +27,10 @@ class PrestoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class PrestoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py index ad16115ef8f3..47fe3eb5f790 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py @@ -27,6 +27,10 @@ class PrestoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class PrestoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'PrestoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py index c2ca123e5409..6353c1cda96a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py @@ -29,7 +29,7 @@ class QuickBooksLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py index 7ba9f145c26e..be12fc5cfba5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py @@ -29,7 +29,7 @@ class QuickBooksLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
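The maxConcurrentConnections addition follows the same shape across every copy source in this release. A hedged example of setting it on PrestoSource (values are illustrative; the field is typed object, so an ADF expression is equally valid):

from azure.mgmt.datafactory.models import PrestoSource

source = PrestoSource(
    query='SELECT * FROM nation',   # pushed down to Presto
    max_concurrent_connections=4,   # cap parallel connections to the store
    source_retry_count=3,
    source_retry_wait='00:00:30')   # TimeSpan-pattern string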
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py index b8567cd772ed..cce0a026ae5a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py @@ -27,6 +27,10 @@ class QuickBooksSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class QuickBooksSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py index b6bb7a260d1d..a00f35d4e1c1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py @@ -27,6 +27,10 @@ class QuickBooksSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class QuickBooksSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'QuickBooksSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py index 1dc8ff198eb8..2450f31222df 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py @@ -27,6 +27,10 @@ class RelationalSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType @@ -42,6 +46,7 @@ class RelationalSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py index 9e7a75043b8c..f88383cbd729 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py @@ -27,6 +27,10 @@ class RelationalSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. 
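Note what actually makes these new fields reach the wire: each _attribute_map entry maps the snake_case attribute to the camelCase payload key. A quick sketch of the resulting payload shape, relying on msrest's Model.serialize():

from azure.mgmt.datafactory.models import RelationalSource

src = RelationalSource(query='select 1', max_concurrent_connections=8)
print(src.serialize())
# Expected shape (sketch):
# {'type': 'RelationalSource', 'query': 'select 1',
#  'maxConcurrentConnections': 8}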
Type: string (or Expression with resultType @@ -42,11 +46,12 @@ class RelationalSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'RelationalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py index e66cf2feebbc..8c5ca2d67f3c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py @@ -31,6 +31,9 @@ class RerunTumblingWindowTrigger(Trigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param parent_trigger: The parent trigger reference. @@ -58,6 +61,7 @@ class RerunTumblingWindowTrigger(Trigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py index eafc3b5743a0..4a7a20759c1b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py @@ -31,6 +31,9 @@ class RerunTumblingWindowTrigger(Trigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param parent_trigger: The parent trigger reference. 
@@ -58,6 +61,7 @@ class RerunTumblingWindowTrigger(Trigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, @@ -65,8 +69,8 @@ class RerunTumblingWindowTrigger(Trigger): 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, } - def __init__(self, *, requested_start_time, requested_end_time, max_concurrency: int, additional_properties=None, description: str=None, parent_trigger=None, **kwargs) -> None: - super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + def __init__(self, *, requested_start_time, requested_end_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, parent_trigger=None, **kwargs) -> None: + super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.parent_trigger = parent_trigger self.requested_start_time = requested_start_time self.requested_end_time = requested_end_time diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py index 9c1b8e4c3cbd..16d1af502787 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py @@ -29,7 +29,7 @@ class ResponsysLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py index 4c1997e6ab26..6d8a74a0a34b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py @@ -29,7 +29,7 @@ class ResponsysLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
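Because annotations now lives on the Trigger base class, RerunTumblingWindowTrigger picks it up as well. A minimal sketch with made-up window values:

from datetime import datetime
from azure.mgmt.datafactory.models import RerunTumblingWindowTrigger

trigger = RerunTumblingWindowTrigger(
    requested_start_time=datetime(2019, 8, 1),
    requested_end_time=datetime(2019, 8, 2),
    max_concurrency=10,
    # New in 0.8.0: free-form tags, serialized as a plain JSON list.
    annotations=['backfill', 'owner:data-team'])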
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py index 1e1a9397a6ba..fd25b8e71377 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py @@ -27,6 +27,10 @@ class ResponsysSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class ResponsysSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py index 3bfb9c19a2a7..8d5e4ac091f7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py @@ -27,6 +27,10 @@ class ResponsysSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class ResponsysSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'ResponsysSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py new file mode 100644 index 000000000000..9a5d41858e54 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class RestResourceDataset(Dataset): + """A Rest service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param relative_url: The relative URL to the resource that the RESTful API + provides. 
Type: string (or Expression with resultType string). + :type relative_url: object + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). + :type pagination_rules: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RestResourceDataset, self).__init__(**kwargs) + self.relative_url = kwargs.get('relative_url', None) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.pagination_rules = kwargs.get('pagination_rules', None) + self.type = 'RestResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py new file mode 100644 index 000000000000..99f39c97f373 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class RestResourceDataset(Dataset): + """A Rest service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param relative_url: The relative URL to the resource that the RESTful API + provides. Type: string (or Expression with resultType string). + :type relative_url: object + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). 
+ :type pagination_rules: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, **kwargs) -> None: + super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.relative_url = relative_url + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.pagination_rules = pagination_rules + self.type = 'RestResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py new file mode 100644 index 000000000000..0fbb15654438 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class RestServiceLinkedService(LinkedService): + """Rest Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
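Assembled, the new RestResourceDataset looks like the sketch below; everything after linked_service_name lands under typeProperties. The linked service name and the pagination rule are assumptions for illustration:

from azure.mgmt.datafactory.models import (
    RestResourceDataset, LinkedServiceReference)

dataset = RestResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyRestService'),
    relative_url='api/v1/orders',   # resolved against the linked service URL
    request_method='GET',           # GET is also the service default
    additional_headers='x-correlation-id: backfill-2019',
    pagination_rules={'AbsoluteUrl': '$.nextLink'})  # illustrative rule only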
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The base URL of the REST service. + :type url: object + :param enable_server_certificate_validation: Whether to validate server + side SSL certificate when connecting to the endpoint.The default value is + true. Type: boolean (or Expression with resultType boolean). + :type enable_server_certificate_validation: object + :param authentication_type: Required. Type of authentication used to + connect to the REST service. Possible values include: 'Anonymous', + 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.RestServiceAuthenticationType + :param user_name: The user name used in Basic authentication type. + :type user_name: object + :param password: The password used in Basic authentication type. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_id: The application's client ID used in + AadServicePrincipal authentication type. + :type service_principal_id: object + :param service_principal_key: The application's key used in + AadServicePrincipal authentication type. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The tenant information (domain name or tenant ID) used in + AadServicePrincipal authentication type under which your application + resides. + :type tenant: object + :param aad_resource_id: The resource you are requesting authorization to + use. + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RestServiceLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'RestService' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py new file mode 100644 index 000000000000..9af9f609e52b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class RestServiceLinkedService(LinkedService): + """Rest Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The base URL of the REST service. + :type url: object + :param enable_server_certificate_validation: Whether to validate server + side SSL certificate when connecting to the endpoint.The default value is + true. Type: boolean (or Expression with resultType boolean). + :type enable_server_certificate_validation: object + :param authentication_type: Required. Type of authentication used to + connect to the REST service. Possible values include: 'Anonymous', + 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.RestServiceAuthenticationType + :param user_name: The user name used in Basic authentication type. + :type user_name: object + :param password: The password used in Basic authentication type. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_id: The application's client ID used in + AadServicePrincipal authentication type. + :type service_principal_id: object + :param service_principal_key: The application's key used in + AadServicePrincipal authentication type. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The tenant information (domain name or tenant ID) used in + AadServicePrincipal authentication type under which your application + resides. + :type tenant: object + :param aad_resource_id: The resource you are requesting authorization to + use. + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, enable_server_certificate_validation=None, user_name=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, aad_resource_id=None, encrypted_credential=None, **kwargs) -> None: + super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.enable_server_certificate_validation = enable_server_certificate_validation + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id + self.encrypted_credential = encrypted_credential + self.type = 'RestService' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py new file mode 100644 index 000000000000..f32d4d67e427 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class RestSource(CopySource): + """A copy activity Rest service source. + + All required parameters must be populated in order to send to Azure. 
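The matching RestServiceLinkedService, sketched with AadServicePrincipal authentication; every identifier and secret below is a placeholder:

from azure.mgmt.datafactory.models import (
    RestServiceLinkedService, SecureString)

linked_service = RestServiceLinkedService(
    url='https://contoso.example/api',          # required base URL
    authentication_type='AadServicePrincipal',  # required
    service_principal_id='<app-client-id>',
    service_principal_key=SecureString(value='<app-secret>'),
    tenant='contoso.onmicrosoft.com',
    aad_resource_id='https://management.azure.com/')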
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). + :type pagination_rules: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:01:40. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + :param request_interval: The time to await before sending next page + request. 
+ :type request_interval: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RestSource, self).__init__(**kwargs) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.pagination_rules = kwargs.get('pagination_rules', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) + self.request_interval = kwargs.get('request_interval', None) + self.type = 'RestSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py new file mode 100644 index 000000000000..5fcbb2f7a76d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class RestSource(CopySource): + """A copy activity Rest service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). 
+ :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). + :type pagination_rules: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:01:40. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + :param request_interval: The time to await before sending next page + request. + :type request_interval: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, http_request_timeout=None, request_interval=None, **kwargs) -> None: + super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.pagination_rules = pagination_rules + self.http_request_timeout = http_request_timeout + self.request_interval = request_interval + self.type = 'RestSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py index 63a4cddc063d..7d54150a6815 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py @@ -24,7 +24,7 @@ class RunQueryFilter(Model): TriggerName, TriggerRunTimestamp and Status. Possible values include: 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', - 'TriggerRunTimestamp' + 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand :param operator: Required. Operator to be used for filter. 
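RestSource mirrors the dataset surface at copy time and adds two timing knobs, http_request_timeout and request_interval. A sketch with illustrative values:

from azure.mgmt.datafactory.models import RestSource

source = RestSource(
    request_method='POST',
    request_body='{"from": "2019-08-01"}',
    additional_headers='Content-Type: application/json',
    http_request_timeout='00:01:40',  # time to first response, not read time
    request_interval=1000,            # wait between page requests (assumed ms)
    max_concurrent_connections=2)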
Possible values include: 'Equals', 'NotEquals', 'In', 'NotIn' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py index fc95591801bd..814e7a4b499b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py @@ -24,7 +24,7 @@ class RunQueryFilter(Model): TriggerName, TriggerRunTimestamp and Status. Possible values include: 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', - 'TriggerRunTimestamp' + 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand :param operator: Required. Operator to be used for filter. Possible values include: 'Equals', 'NotEquals', 'In', 'NotIn' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py index 5804e779d1ef..c644ac664831 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py @@ -29,7 +29,7 @@ class SalesforceLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py index 9fa5287aa3b4..05fcea7a3990 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py @@ -29,7 +29,7 @@ class SalesforceLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py index f3d2861576e4..93b4fcdb3d1f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py @@ -29,7 +29,7 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
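The widened RunQueryFilterOperand enum makes rerun groups queryable. A minimal sketch of filtering pipeline runs on one of the new operands (the GUID and date window are placeholders):

from datetime import datetime
from azure.mgmt.datafactory.models import (
    RunQueryFilter, RunFilterParameters)

run_group_filter = RunQueryFilter(
    operand='RunGroupId',   # new in 0.8.0, alongside 'LatestOnly'
    operator='Equals',
    values=['<run-group-guid>'])

params = RunFilterParameters(
    last_updated_after=datetime(2019, 8, 1),
    last_updated_before=datetime(2019, 8, 31),
    filters=[run_group_filter])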
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py index 863b679398e1..d7e09e27a43f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py @@ -29,7 +29,7 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py index bf08fdaa88bf..09a0eca1758e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py @@ -27,6 +27,10 @@ class SalesforceMarketingCloudSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class SalesforceMarketingCloudSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py index 0a3d26cfb43b..9b898af0c3a1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py @@ -27,6 +27,10 @@ class SalesforceMarketingCloudSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class SalesforceMarketingCloudSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'SalesforceMarketingCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py new file mode 100644 index 000000000000..fb6476ac9a30 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SalesforceServiceCloudLinkedService(LinkedService): + """Linked service for Salesforce Service Cloud. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param environment_url: The URL of Salesforce Service Cloud instance. + Default is 'https://login.salesforce.com'. To copy data from sandbox, + specify 'https://test.salesforce.com'. To copy data from custom domain, + specify, for example, 'https://[domain].my.salesforce.com'. Type: string + (or Expression with resultType string). + :type environment_url: object + :param username: The username for Basic authentication of the Salesforce + instance.
Type: string (or Expression with resultType string). + :type username: object + :param password: The password for Basic authentication of the Salesforce + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param security_token: The security token is required to remotely access + Salesforce instance. + :type security_token: ~azure.mgmt.datafactory.models.SecretBase + :param extended_properties: Extended properties appended to the connection + string. Type: string (or Expression with resultType string). + :type extended_properties: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceServiceCloudLinkedService, self).__init__(**kwargs) + self.environment_url = kwargs.get('environment_url', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.security_token = kwargs.get('security_token', None) + self.extended_properties = kwargs.get('extended_properties', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SalesforceServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py new file mode 100644 index 000000000000..3f0b3cc64d91 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SalesforceServiceCloudLinkedService(LinkedService): + """Linked service for Salesforce Service Cloud. + + All required parameters must be populated in order to send to Azure. 
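Putting the new linked service together, a minimal sketch; the org URL and credentials are placeholders, and secrets are inlined as SecureString only for brevity (a Key Vault reference would be typical in practice):

    from azure.mgmt.datafactory.models import (
        SalesforceServiceCloudLinkedService, SecureString)

    # Hypothetical sandbox connection for the new linked service type.
    sfsc_ls = SalesforceServiceCloudLinkedService(
        environment_url='https://test.salesforce.com',
        username='integration.user@example.com',
        password=SecureString(value='<password>'),
        security_token=SecureString(value='<security-token>'),
    )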
+ + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param environment_url: The URL of Salesforce Service Cloud instance. + Default is 'https://login.salesforce.com'. To copy data from sandbox, + specify 'https://test.salesforce.com'. To copy data from custom domain, + specify, for example, 'https://[domain].my.salesforce.com'. Type: string + (or Expression with resultType string). + :type environment_url: object + :param username: The username for Basic authentication of the Salesforce + instance. Type: string (or Expression with resultType string). + :type username: object + :param password: The password for Basic authentication of the Salesforce + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param security_token: The security token is required to remotely access + Salesforce instance. + :type security_token: ~azure.mgmt.datafactory.models.SecretBase + :param extended_properties: Extended properties appended to the connection + string. Type: string (or Expression with resultType string). + :type extended_properties: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, extended_properties=None, encrypted_credential=None, **kwargs) -> None: + super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.environment_url = environment_url + self.username = username + self.password = password + self.security_token = security_token + self.extended_properties = extended_properties + self.encrypted_credential = encrypted_credential + self.type = 'SalesforceServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py new file mode 100644 index 000000000000..1f5cb3bb5bf1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SalesforceServiceCloudObjectDataset(Dataset): + """The Salesforce Service Cloud object dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param object_api_name: The Salesforce Service Cloud object API name. + Type: string (or Expression with resultType string). + :type object_api_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceServiceCloudObjectDataset, self).__init__(**kwargs) + self.object_api_name = kwargs.get('object_api_name', None) + self.type = 'SalesforceServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py new file mode 100644 index 000000000000..d215f5f0084d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SalesforceServiceCloudObjectDataset(Dataset): + """The Salesforce Service Cloud object dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param object_api_name: The Salesforce Service Cloud object API name. + Type: string (or Expression with resultType string). + :type object_api_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None, **kwargs) -> None: + super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.object_api_name = object_api_name + self.type = 'SalesforceServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py new file mode 100644 index 000000000000..99e2b1a2c924 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SalesforceServiceCloudSink(CopySink): + """A copy activity Salesforce Service Cloud sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. 
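The matching object dataset is thin: it only names the Salesforce object to read or write. A sketch using the linked service sketched earlier (reference and object names are placeholders):

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SalesforceServiceCloudObjectDataset)

    # Hypothetical dataset bound to the standard Case object.
    case_ds = SalesforceServiceCloudObjectDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='SalesforceServiceCloudLS'),
        object_api_name='Case',
    )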
Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: The write behavior for the operation. Default is + Insert. Possible values include: 'Insert', 'Upsert' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :param external_id_field_name: The name of the external ID field for + upsert operation. Default value is 'Id' column. Type: string (or + Expression with resultType string). + :type external_id_field_name: object + :param ignore_null_values: The flag indicating whether or not to ignore + null values from input dataset (except key fields) during write operation. + Default value is false. If set it to true, it means ADF will leave the + data in the destination object unchanged when doing upsert/update + operation and insert defined default value when doing insert operation, + versus ADF will update the data in the destination object to NULL when + doing upsert/update operation and insert NULL value when doing insert + operation. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceServiceCloudSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) + self.external_id_field_name = kwargs.get('external_id_field_name', None) + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.type = 'SalesforceServiceCloudSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py new file mode 100644 index 000000000000..2abfaa12d0e7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SalesforceServiceCloudSink(CopySink): + """A copy activity Salesforce Service Cloud sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: The write behavior for the operation. Default is + Insert. Possible values include: 'Insert', 'Upsert' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :param external_id_field_name: The name of the external ID field for + upsert operation. Default value is 'Id' column. Type: string (or + Expression with resultType string). + :type external_id_field_name: object + :param ignore_null_values: The flag indicating whether or not to ignore + null values from input dataset (except key fields) during write operation. + Default value is false. If set it to true, it means ADF will leave the + data in the destination object unchanged when doing upsert/update + operation and insert defined default value when doing insert operation, + versus ADF will update the data in the destination object to NULL when + doing upsert/update operation and insert NULL value when doing insert + operation. Type: boolean (or Expression with resultType boolean). 
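A sketch of the sink with its upsert-oriented knobs; the external-ID field name is a placeholder for a custom field:

    from azure.mgmt.datafactory.models import SalesforceServiceCloudSink

    # Hypothetical upsert sink: match on a custom external-ID field and keep
    # destination fields untouched when the input value is NULL.
    sfsc_sink = SalesforceServiceCloudSink(
        write_behavior='Upsert',
        external_id_field_name='External_Id__c',
        ignore_null_values=True,
        write_batch_size=5000,
        max_concurrent_connections=4,
    )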
+ :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: + super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.external_id_field_name = external_id_field_name + self.ignore_null_values = ignore_null_values + self.type = 'SalesforceServiceCloudSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py new file mode 100644 index 000000000000..255bfab477bc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SalesforceServiceCloudSource(CopySource): + """A copy activity Salesforce Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + :param read_behavior: The read behavior for the operation. Default is + Query. Possible values include: 'Query', 'QueryAll' + :type read_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SalesforceServiceCloudSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.read_behavior = kwargs.get('read_behavior', None) + self.type = 'SalesforceServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py new file mode 100644 index 000000000000..77bb267f5a47 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SalesforceServiceCloudSource(CopySource): + """A copy activity Salesforce Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is + Query. 
Possible values include: 'Query', 'QueryAll' + :type read_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: + super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.read_behavior = read_behavior + self.type = 'SalesforceServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py index 525aaccd49be..9a1291bd4bfe 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py @@ -34,6 +34,10 @@ class SalesforceSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is @@ -65,6 +69,7 @@ class SalesforceSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py index 6db44ebb4228..54a56618d01e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py @@ -34,6 +34,10 @@ class SalesforceSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
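And the source side, using the new read_behavior and max_concurrent_connections options; the SOQL text is illustrative:

    from azure.mgmt.datafactory.models import SalesforceServiceCloudSource

    # Hypothetical source that also returns soft-deleted rows via 'QueryAll'.
    sfsc_source = SalesforceServiceCloudSource(
        query='SELECT Id, Status FROM Case WHERE IsDeleted = true',
        read_behavior='QueryAll',
        max_concurrent_connections=2,
    )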
:type type: str :param write_behavior: The write behavior for the operation. Default is @@ -65,14 +69,15 @@ class SalesforceSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: - super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: + super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name self.ignore_null_values = ignore_null_values diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py index 8442a716c842..4f2590c3ab9d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py @@ -27,6 +27,10 @@ class SalesforceSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. 
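The source and sink sketched above compose in the usual way inside a copy activity; the dataset reference names below are placeholders continuing those sketches:

    from azure.mgmt.datafactory.models import CopyActivity, DatasetReference

    # Hypothetical copy activity wiring the Service Cloud source to the sink.
    copy = CopyActivity(
        name='CopyServiceCloudCases',
        inputs=[DatasetReference(reference_name='ServiceCloudCases')],
        outputs=[DatasetReference(reference_name='CaseSinkDataset')],
        source=sfsc_source,
        sink=sfsc_sink,
    )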
Type: string (or Expression with resultType @@ -46,6 +50,7 @@ class SalesforceSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py index 9ebc65ddeec8..4441e92eaff3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py @@ -27,6 +27,10 @@ class SalesforceSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType @@ -46,13 +50,14 @@ class SalesforceSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, read_behavior=None, **kwargs) -> None: - super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: + super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.read_behavior = read_behavior self.type = 'SalesforceSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py new file mode 100644 index 000000000000..048d26f85696 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SapBwCubeDataset(Dataset): + """The SAP BW cube dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SapBwCubeDataset, self).__init__(**kwargs) + self.type = 'SapBwCube' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py new file mode 100644 index 000000000000..08334a824ba4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapBwCubeDataset(Dataset): + """The SAP BW cube dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(SapBwCubeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SapBwCube' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py index 2fbb906559bc..a57164c7215d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py @@ -29,7 +29,7 @@ class SapBWLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
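SapBwCubeDataset adds no type properties of its own; it simply binds to an SAP BW linked service. A sketch with a placeholder reference name:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SapBwCubeDataset)

    # Hypothetical cube dataset over an existing SAP BW linked service.
    bw_cube = SapBwCubeDataset(
        linked_service_name=LinkedServiceReference(reference_name='SapBwLS'),
    )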
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py index a1f6133e558d..92aef25dc215 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py @@ -29,7 +29,7 @@ class SapBWLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py new file mode 100644 index 000000000000..e3762d8e694e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SapBwSource(CopySource): + """A copy activity source for SapBW server via MDX. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: MDX query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapBwSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'SapBwSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py new file mode 100644 index 000000000000..ed6ff734742d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapBwSource(CopySource): + """A copy activity source for SapBW server via MDX. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: MDX query. Type: string (or Expression with resultType + string). 
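Finally, the new MDX source for SAP BW; the query text and cube name are illustrative:

    from azure.mgmt.datafactory.models import SapBwSource

    # Hypothetical MDX pull from a BW cube, throttled to one connection.
    bw_source = SapBwSource(
        query='SELECT [Measures].MEMBERS ON COLUMNS FROM [MyCube]',
        max_concurrent_connections=1,
    )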
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SapBwSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py index 5c9a6c2deb00..53d47ab8ae41 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py @@ -29,7 +29,7 @@ class SapCloudForCustomerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py index 85c1100d01eb..9e47fd696503 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py @@ -29,7 +29,7 @@ class SapCloudForCustomerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py index 05d98ec70eaa..e5a37858abb5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py @@ -34,6 +34,10 @@ class SapCloudForCustomerSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
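
Editor's note, not part of the diff: the new SapBwSource model above plugs into a copy activity like any other CopySource. A minimal sketch, assuming the model is importable from azure.mgmt.datafactory.models as usual for this package, with placeholder values::

    # Hypothetical usage of the SapBwSource model added in this diff.
    from azure.mgmt.datafactory.models import SapBwSource

    source = SapBwSource(
        # The MDX text is a placeholder; per the docstring above, the
        # parameter accepts a string or an ADF Expression.
        query='SELECT ... ON COLUMNS FROM [SomeCube]',
        max_concurrent_connections=4,  # placeholder concurrency limit
    )
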
:type type: str :param write_behavior: The write behavior for the operation. Default is @@ -52,6 +56,7 @@ class SapCloudForCustomerSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py index f3cd45263f3e..29f01fdd6891 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py @@ -34,6 +34,10 @@ class SapCloudForCustomerSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is @@ -52,11 +56,12 @@ class SapCloudForCustomerSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, write_behavior=None, **kwargs) -> None: - super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.type = 'SapCloudForCustomerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py index c8dedf91e188..561c1b342f93 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py @@ -27,6 +27,10 @@ class SapCloudForCustomerSource(CopySource): 
with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: SAP Cloud for Customer OData query. For example, "$top=1". @@ -42,6 +46,7 @@ class SapCloudForCustomerSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py index ab5bddf21be3..e9dab6ad1899 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py @@ -27,6 +27,10 @@ class SapCloudForCustomerSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: SAP Cloud for Customer OData query. For example, "$top=1". 
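
Editor's note, not part of the diff: taken together, these SapCloudForCustomer changes give both the source and the sink the new max_concurrent_connections setting. A small sketch with placeholder values, assuming the usual model exports::

    # Hypothetical pairing of the patched source and sink models.
    from azure.mgmt.datafactory.models import (
        SapCloudForCustomerSink,
        SapCloudForCustomerSource,
    )

    source = SapCloudForCustomerSource(
        query='$top=1',                # OData query, as in the docstring example
        max_concurrent_connections=8,  # placeholder limit for the source store
    )
    sink = SapCloudForCustomerSink(
        write_behavior='Insert',       # assumed value; see the sink docstring
        max_concurrent_connections=8,  # the same new setting on the sink side
    )
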
@@ -42,11 +46,12 @@ class SapCloudForCustomerSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'SapCloudForCustomerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py index 4303b2f9cbca..0ca69242055f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py @@ -29,7 +29,7 @@ class SapEccLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py index 24490fb39a9a..7afd76b8fe09 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py @@ -29,7 +29,7 @@ class SapEccLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py index e4f10113aecd..f79367f49b3d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py @@ -45,7 +45,7 @@ class SapEccResourceDataset(Dataset): :type type: str :param path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). 
- :type path: str + :type path: object """ _validation = { @@ -64,7 +64,7 @@ class SapEccResourceDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py index 08bf742dc415..76aaeb9bb9f2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py @@ -45,7 +45,7 @@ class SapEccResourceDataset(Dataset): :type type: str :param path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). - :type path: str + :type path: object """ _validation = { @@ -64,10 +64,10 @@ class SapEccResourceDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } - def __init__(self, *, linked_service_name, path: str, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.path = path self.type = 'SapEccResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py index 84aa047e6d8a..6379c33713d4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py @@ -27,11 +27,15 @@ class SapEccSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
- :type query: str + :type query: object """ _validation = { @@ -42,8 +46,9 @@ class SapEccSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py index f8993720428c..4412cac39960 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py @@ -27,11 +27,15 @@ class SapEccSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: str + :type query: object """ _validation = { @@ -42,11 +46,12 @@ class SapEccSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query: str=None, **kwargs) -> None: - super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'SapEccSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py index 0c2dbec28558..14eda87b7be6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py @@ -29,10 +29,13 @@ class SapHanaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. 
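
Editor's note, not part of the diff: widening SapEccSource.query (and SapEccResourceDataset.path) from str to object is what lets callers pass an ADF Expression instead of a literal. A hypothetical before/after, assuming the usual model exports and that the dict below is the JSON expression shape ADF expects::

    from azure.mgmt.datafactory.models import SapEccSource

    # Literal strings still work exactly as before...
    literal = SapEccSource(query='$top=1')

    # ...and expression objects, which the old str typing could not carry,
    # now pass through as-is.  The pipeline parameter name "n" is a placeholder.
    dynamic = SapEccSource(
        query={
            'value': "@concat('$top=', pipeline().parameters.n)",
            'type': 'Expression',
        },
    )
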
:type annotations: list[object] :param type: Required. Constant filled by server. :type type: str + :param connection_string: SAP HANA ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object :param server: Required. Host name of the SAP HANA server. Type: string (or Expression with resultType string). :type server: object @@ -63,6 +66,7 @@ class SapHanaLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, @@ -72,6 +76,7 @@ class SapHanaLinkedService(LinkedService): def __init__(self, **kwargs): super(SapHanaLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) self.server = kwargs.get('server', None) self.authentication_type = kwargs.get('authentication_type', None) self.user_name = kwargs.get('user_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py index c906d74d0c2b..de378a5b2bf3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py @@ -29,10 +29,13 @@ class SapHanaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str + :param connection_string: SAP HANA ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object :param server: Required. Host name of the SAP HANA server. Type: string (or Expression with resultType string). 
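
Editor's note, not part of the diff: with the new connection_string property, a SAP HANA linked service can be defined connection-string-first while server stays required (the validation block is unchanged). A minimal sketch with placeholder credentials, assuming SecureString is available as the concrete SecretBase type, as elsewhere in this package::

    from azure.mgmt.datafactory.models import SapHanaLinkedService, SecureString

    linked_service = SapHanaLinkedService(
        server='myhana.contoso.com:30015',              # placeholder host, still required
        connection_string='DRIVER=...;SERVERNODE=...',  # new in this diff; placeholder
        authentication_type='Basic',                    # assumed enum value
        user_name='hana_user',                          # placeholder
        password=SecureString(value='<secret>'),        # any SecretBase works
    )
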
:type server: object @@ -63,6 +66,7 @@ class SapHanaLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, @@ -70,8 +74,9 @@ class SapHanaLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string self.server = server self.authentication_type = authentication_type self.user_name = user_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py new file mode 100644 index 000000000000..e946dbcd9a50 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SapHanaSource(CopySource): + """A copy activity source for SAP HANA source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: SAP HANA Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param packet_size: The packet size of data read from SAP HANA. 
Type: + integer(or Expression with resultType integer). + :type packet_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'packet_size': {'key': 'packetSize', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapHanaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.packet_size = kwargs.get('packet_size', None) + self.type = 'SapHanaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py new file mode 100644 index 000000000000..730326c19183 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapHanaSource(CopySource): + """A copy activity source for SAP HANA source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: SAP HANA Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param packet_size: The packet size of data read from SAP HANA. Type: + integer(or Expression with resultType integer). 
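
Editor's note, not part of the diff: the new SapHanaSource pairs a HANA SQL query with the packet_size tuning knob documented above. A short sketch with placeholder values, assuming the usual model exports::

    from azure.mgmt.datafactory.models import SapHanaSource

    source = SapHanaSource(
        query='SELECT * FROM "MYSCHEMA"."MYTABLE"',  # placeholder HANA SQL
        packet_size=2048,              # placeholder; the docstring does not state units
        max_concurrent_connections=4,  # placeholder concurrency limit
    )
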
+ :type packet_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'packet_size': {'key': 'packetSize', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, packet_size=None, **kwargs) -> None: + super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.packet_size = packet_size + self.type = 'SapHanaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py new file mode 100644 index 000000000000..6ff1ae31cd22 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SapHanaTableDataset(Dataset): + """SAP HANA Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param sap_hana_table_dataset_schema: The schema name of SAP HANA. Type: + string (or Expression with resultType string). + :type sap_hana_table_dataset_schema: object + :param table: The table name of SAP HANA. 
Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sap_hana_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapHanaTableDataset, self).__init__(**kwargs) + self.sap_hana_table_dataset_schema = kwargs.get('sap_hana_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'SapHanaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py new file mode 100644 index 000000000000..6dc5c48ba21d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapHanaTableDataset(Dataset): + """SAP HANA Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param sap_hana_table_dataset_schema: The schema name of SAP HANA. 
Type: + string (or Expression with resultType string). + :type sap_hana_table_dataset_schema: object + :param table: The table name of SAP HANA. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sap_hana_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, sap_hana_table_dataset_schema=None, table=None, **kwargs) -> None: + super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.sap_hana_table_dataset_schema = sap_hana_table_dataset_schema + self.table = table + self.type = 'SapHanaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py new file mode 100644 index 000000000000..bfe9c323d302 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SapOpenHubLinkedService(LinkedService): + """SAP Business Warehouse Open Hub Destination Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. 
Host name of the SAP BW instance where the open + hub destination is located. Type: string (or Expression with resultType + string). + :type server: object + :param system_number: Required. System number of the BW system where the + open hub destination is located. (Usually a two-digit decimal number + represented as a string.) Type: string (or Expression with resultType + string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system where + the open hub destination is located. (Usually a three-digit decimal number + represented as a string) Type: string (or Expression with resultType + string). + :type client_id: object + :param language: Language of the BW system where the open hub destination + is located. The default value is EN. Type: string (or Expression with + resultType string). + :type language: object + :param user_name: Username to access the SAP BW server where the open hub + destination is located. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password to access the SAP BW server where the open hub + destination is located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapOpenHubLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.system_number = kwargs.get('system_number', None) + self.client_id = kwargs.get('client_id', None) + self.language = kwargs.get('language', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapOpenHub' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py new file mode 100644 index 000000000000..eddc50b0f1c5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) 
Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapOpenHubLinkedService(LinkedService): + """SAP Business Warehouse Open Hub Destination Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Host name of the SAP BW instance where the open + hub destination is located. Type: string (or Expression with resultType + string). + :type server: object + :param system_number: Required. System number of the BW system where the + open hub destination is located. (Usually a two-digit decimal number + represented as a string.) Type: string (or Expression with resultType + string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system where + the open hub destination is located. (Usually a three-digit decimal number + represented as a string) Type: string (or Expression with resultType + string). + :type client_id: object + :param language: Language of the BW system where the open hub destination + is located. The default value is EN. Type: string (or Expression with + resultType string). + :type language: object + :param user_name: Username to access the SAP BW server where the open hub + destination is located. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password to access the SAP BW server where the open hub + destination is located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
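
Editor's note, not part of the diff: the three required Open Hub properties (server, system_number, client_id) mirror a standard SAP logon. A hypothetical construction with placeholder values, again assuming SecureString as the SecretBase implementation::

    from azure.mgmt.datafactory.models import (
        SapOpenHubLinkedService,
        SecureString,
    )

    linked_service = SapOpenHubLinkedService(
        server='mybw.contoso.com',   # placeholder BW host
        system_number='00',          # two-digit string, per the docstring
        client_id='100',             # three-digit string, per the docstring
        language='EN',               # optional; EN is the documented default
        user_name='bw_user',         # placeholder
        password=SecureString(value='<secret>'),
    )
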
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, system_number, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, language=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.system_number = system_number + self.client_id = client_id + self.language = language + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapOpenHub' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py new file mode 100644 index 000000000000..d6dcbda60e36 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SapOpenHubSource(CopySource): + """A copy activity source for SAP Business Warehouse Open Hub Destination + source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer ). + :type base_request_id: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapOpenHubSource, self).__init__(**kwargs) + self.exclude_last_request = kwargs.get('exclude_last_request', None) + self.base_request_id = kwargs.get('base_request_id', None) + self.type = 'SapOpenHubSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py new file mode 100644 index 000000000000..752ffd8554b0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapOpenHubSource(CopySource): + """A copy activity source for SAP Business Warehouse Open Hub Destination + source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. Type: boolean (or Expression with + resultType boolean). 
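
Editor's note, not part of the diff: exclude_last_request and base_request_id together implement delta loading against an Open Hub destination. A sketch with placeholder values::

    from azure.mgmt.datafactory.models import SapOpenHubSource

    source = SapOpenHubSource(
        exclude_last_request=True,  # skip the newest, possibly still-open request
        base_request_id=42,         # placeholder; only rows with requestId > 42 are read
    )
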
+ :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer ). + :type base_request_id: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: + super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.exclude_last_request = exclude_last_request + self.base_request_id = base_request_id + self.type = 'SapOpenHubSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py new file mode 100644 index 000000000000..2682969c5016 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SapOpenHubTableDataset(Dataset): + """Sap Business Warehouse Open Hub Destination Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param open_hub_destination_name: Required. The name of the Open Hub + Destination with destination type as Database Table. Type: string (or + Expression with resultType string). + :type open_hub_destination_name: object + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer ). + :type base_request_id: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'open_hub_destination_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, + 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapOpenHubTableDataset, self).__init__(**kwargs) + self.open_hub_destination_name = kwargs.get('open_hub_destination_name', None) + self.exclude_last_request = kwargs.get('exclude_last_request', None) + self.base_request_id = kwargs.get('base_request_id', None) + self.type = 'SapOpenHubTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py new file mode 100644 index 000000000000..b06a53c10db3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapOpenHubTableDataset(Dataset): + """Sap Business Warehouse Open Hub Destination Table properties. + + All required parameters must be populated in order to send to Azure. 
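
Editor's note, not part of the diff: the matching dataset type names the Open Hub destination to read from, and the same delta-loading switches appear here as dataset-level properties. A hypothetical sketch, reusing the linked service name from the earlier note::

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        SapOpenHubTableDataset,
    )

    dataset = SapOpenHubTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='SapOpenHubLS'),
        open_hub_destination_name='MY_OHD',  # placeholder; must be a Database Table destination
        exclude_last_request=True,
        base_request_id=42,                  # placeholder request id
    )
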
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param open_hub_destination_name: Required. The name of the Open Hub + Destination with destination type as Database Table. Type: string (or + Expression with resultType string). + :type open_hub_destination_name: object + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer).
+ :type base_request_id: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'open_hub_destination_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, + 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, open_hub_destination_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: + super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.open_hub_destination_name = open_hub_destination_name + self.exclude_last_request = exclude_last_request + self.base_request_id = base_request_id + self.type = 'SapOpenHubTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py new file mode 100644 index 000000000000..83b76d0a4fdd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py @@ -0,0 +1,140 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SapTableLinkedService(LinkedService): + """SAP Table Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
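# A minimal usage sketch for the dataset defined above. The linked service
# name and Open Hub destination name are hypothetical; LinkedServiceReference
# is the reference type named in the docstring.
from azure.mgmt.datafactory.models import (LinkedServiceReference,
                                           SapOpenHubTableDataset)

open_hub_ds = SapOpenHubTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='SapBwLinkedService'),
    open_hub_destination_name='MyOpenHubDestination',
    exclude_last_request=True)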
+ :type type: str + :param server: Host name of the SAP instance where the table is located. + Type: string (or Expression with resultType string). + :type server: object + :param system_number: System number of the SAP system where the table is + located. (Usually a two-digit decimal number represented as a string.) + Type: string (or Expression with resultType string). + :type system_number: object + :param client_id: Client ID of the client on the SAP system where the + table is located. (Usually a three-digit decimal number represented as a + string) Type: string (or Expression with resultType string). + :type client_id: object + :param language: Language of the SAP system where the table is located. + The default value is EN. Type: string (or Expression with resultType + string). + :type language: object + :param system_id: SystemID of the SAP system where the table is located. + Type: string (or Expression with resultType string). + :type system_id: object + :param user_name: Username to access the SAP server where the table is + located. Type: string (or Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP server where the table is + located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param message_server: The hostname of the SAP Message Server. Type: + string (or Expression with resultType string). + :type message_server: object + :param message_server_service: The service name or port number of the + Message Server. Type: string (or Expression with resultType string). + :type message_server_service: object + :param snc_mode: SNC activation indicator to access the SAP server where + the table is located. Must be either 0 (off) or 1 (on). Type: string (or + Expression with resultType string). + :type snc_mode: object + :param snc_my_name: Initiator's SNC name to access the SAP server where + the table is located. Type: string (or Expression with resultType string). + :type snc_my_name: object + :param snc_partner_name: Communication partner's SNC name to access the + SAP server where the table is located. Type: string (or Expression with + resultType string). + :type snc_partner_name: object + :param snc_library_path: External security product's library to access the + SAP server where the table is located. Type: string (or Expression with + resultType string). + :type snc_library_path: object + :param snc_qop: SNC Quality of Protection. Allowed values include: 1, 2, 3, + 8, 9. Type: string (or Expression with resultType string). + :type snc_qop: object + :param logon_group: The Logon Group for the SAP System. Type: string (or + Expression with resultType string). + :type logon_group: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, + 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, + 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, + 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, + 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, + 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, + 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, + 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapTableLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.system_number = kwargs.get('system_number', None) + self.client_id = kwargs.get('client_id', None) + self.language = kwargs.get('language', None) + self.system_id = kwargs.get('system_id', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.message_server = kwargs.get('message_server', None) + self.message_server_service = kwargs.get('message_server_service', None) + self.snc_mode = kwargs.get('snc_mode', None) + self.snc_my_name = kwargs.get('snc_my_name', None) + self.snc_partner_name = kwargs.get('snc_partner_name', None) + self.snc_library_path = kwargs.get('snc_library_path', None) + self.snc_qop = kwargs.get('snc_qop', None) + self.logon_group = kwargs.get('logon_group', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py new file mode 100644 index 000000000000..d098acc1bbda --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py @@ -0,0 +1,140 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
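# A minimal usage sketch for the linked service defined above, assuming
# SecureString as the SecretBase implementation used elsewhere in this
# package; host, system number, client and credentials are placeholders.
from azure.mgmt.datafactory.models import SapTableLinkedService, SecureString

sap_table_ls = SapTableLinkedService(
    server='sapserver.contoso.com',
    system_number='00',
    client_id='100',
    user_name='sap_user',
    password=SecureString(value='<password>'))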
+# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapTableLinkedService(LinkedService): + """SAP Table Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Host name of the SAP instance where the table is located. + Type: string (or Expression with resultType string). + :type server: object + :param system_number: System number of the SAP system where the table is + located. (Usually a two-digit decimal number represented as a string.) + Type: string (or Expression with resultType string). + :type system_number: object + :param client_id: Client ID of the client on the SAP system where the + table is located. (Usually a three-digit decimal number represented as a + string) Type: string (or Expression with resultType string). + :type client_id: object + :param language: Language of the SAP system where the table is located. + The default value is EN. Type: string (or Expression with resultType + string). + :type language: object + :param system_id: SystemID of the SAP system where the table is located. + Type: string (or Expression with resultType string). + :type system_id: object + :param user_name: Username to access the SAP server where the table is + located. Type: string (or Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP server where the table is + located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param message_server: The hostname of the SAP Message Server. Type: + string (or Expression with resultType string). + :type message_server: object + :param message_server_service: The service name or port number of the + Message Server. Type: string (or Expression with resultType string). + :type message_server_service: object + :param snc_mode: SNC activation indicator to access the SAP server where + the table is located. Must be either 0 (off) or 1 (on). Type: string (or + Expression with resultType string). + :type snc_mode: object + :param snc_my_name: Initiator's SNC name to access the SAP server where + the table is located. Type: string (or Expression with resultType string). + :type snc_my_name: object + :param snc_partner_name: Communication partner's SNC name to access the + SAP server where the table is located. Type: string (or Expression with + resultType string). + :type snc_partner_name: object + :param snc_library_path: External security product's library to access the + SAP server where the table is located. Type: string (or Expression with + resultType string). + :type snc_library_path: object + :param snc_qop: SNC Quality of Protection. Allowed values include: 1, 2, 3, + 8, 9. Type: string (or Expression with resultType string).
+ :type snc_qop: object + :param logon_group: The Logon Group for the SAP System. Type: string (or + Expression with resultType string). + :type logon_group: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, + 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, + 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, + 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, + 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, + 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, + 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, + 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server=None, system_number=None, client_id=None, language=None, system_id=None, user_name=None, password=None, message_server=None, message_server_service=None, snc_mode=None, snc_my_name=None, snc_partner_name=None, snc_library_path=None, snc_qop=None, logon_group=None, encrypted_credential=None, **kwargs) -> None: + super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.system_number = system_number + self.client_id = client_id + self.language = language + self.system_id = system_id + self.user_name = user_name + self.password = password + self.message_server = message_server + self.message_server_service = message_server_service + self.snc_mode = snc_mode + self.snc_my_name = snc_my_name + self.snc_partner_name = snc_partner_name + self.snc_library_path = snc_library_path + self.snc_qop = snc_qop + self.logon_group = logon_group + self.encrypted_credential = encrypted_credential + self.type = 'SapTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py new file mode 100644 index 
000000000000..b688fe16683b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SapTablePartitionSettings(Model): + """The settings that will be leveraged for SAP table source partitioning. + + :param partition_column_name: The name of the column that will be used for + proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + :param max_partitions_number: The maximum number of partitions the table + will be split into. Type: integer (or Expression with resultType integer). + :type max_partitions_number: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapTablePartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) + self.max_partitions_number = kwargs.get('max_partitions_number', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py new file mode 100644 index 000000000000..37bdf610ab35 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SapTablePartitionSettings(Model): + """The settings that will be leveraged for SAP table source partitioning.
+ + :param partition_column_name: The name of the column that will be used for + proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + :param max_partitions_number: The maximum number of partitions the table + will be split into. Type: integer (or Expression with resultType integer). + :type max_partitions_number: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, + } + + def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, max_partitions_number=None, **kwargs) -> None: + super(SapTablePartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound + self.max_partitions_number = max_partitions_number diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py new file mode 100644 index 000000000000..24601ba6b793 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SapTableResourceDataset(Dataset): + """SAP Table Resource properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset.
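# A minimal sketch of the partition settings defined above: range-partition on
# a hypothetical fiscal-year column, capping the partition count. The bounds
# are passed as strings, matching the docstring.
from azure.mgmt.datafactory.models import SapTablePartitionSettings

partition_settings = SapTablePartitionSettings(
    partition_column_name='GJAHR',
    partition_lower_bound='2010',
    partition_upper_bound='2019',
    max_partitions_number=10)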
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. The name of the SAP Table. Type: string (or + Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapTableResourceDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'SapTableResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py new file mode 100644 index 000000000000..7b034ccd3a91 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapTableResourceDataset(Dataset): + """SAP Table Resource properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. The name of the SAP Table. Type: string (or + Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(SapTableResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SapTableResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py new file mode 100644 index 000000000000..35799515440e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SapTableSource(CopySource): + """A copy activity source for SAP Table source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
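# A minimal usage sketch for the SAP table resource dataset completed above,
# pointing at a hypothetical SAP table ('MARA') through an existing linked
# service reference.
from azure.mgmt.datafactory.models import (LinkedServiceReference,
                                           SapTableResourceDataset)

sap_table_ds = SapTableResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='SapTableLinkedService'),
    table_name='MARA')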
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param row_count: The number of rows to be retrieved. Type: integer (or + Expression with resultType integer). + :type row_count: object + :param row_skips: The number of rows that will be skipped. Type: integer + (or Expression with resultType integer). + :type row_skips: object + :param rfc_table_fields: The fields of the SAP table that will be + retrieved. For example, column0, column1. Type: string (or Expression with + resultType string). + :type rfc_table_fields: object + :param rfc_table_options: The options for the filtering of the SAP Table. + For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with + resultType string). + :type rfc_table_options: object + :param batch_size: Specifies the maximum number of rows that will be + retrieved at a time when retrieving data from SAP Table. Type: integer (or + Expression with resultType integer). + :type batch_size: object + :param custom_rfc_read_table_function_module: Specifies the custom RFC + function module that will be used to read data from SAP Table. Type: + string (or Expression with resultType string). + :type custom_rfc_read_table_function_module: object + :param partition_option: The partition mechanism that will be used for SAP + table read in parallel. Possible values include: 'None', 'PartitionOnInt', + 'PartitionOnCalendarYear', 'PartitionOnCalendarMonth', + 'PartitionOnCalendarDate', 'PartitionOnTime' + :type partition_option: str or + ~azure.mgmt.datafactory.models.SapTablePartitionOption + :param partition_settings: The settings that will be leveraged for SAP + table source partitioning.
+ :type partition_settings: + ~azure.mgmt.datafactory.models.SapTablePartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'row_count': {'key': 'rowCount', 'type': 'object'}, + 'row_skips': {'key': 'rowSkips', 'type': 'object'}, + 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, + 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, + } + + def __init__(self, **kwargs): + super(SapTableSource, self).__init__(**kwargs) + self.row_count = kwargs.get('row_count', None) + self.row_skips = kwargs.get('row_skips', None) + self.rfc_table_fields = kwargs.get('rfc_table_fields', None) + self.rfc_table_options = kwargs.get('rfc_table_options', None) + self.batch_size = kwargs.get('batch_size', None) + self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + self.type = 'SapTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py new file mode 100644 index 000000000000..bed7bbb93932 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapTableSource(CopySource): + """A copy activity source for SAP Table source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + :param row_count: The number of rows to be retrieved. Type: integer (or + Expression with resultType integer). + :type row_count: object + :param row_skips: The number of rows that will be skipped. Type: integer + (or Expression with resultType integer). + :type row_skips: object + :param rfc_table_fields: The fields of the SAP table that will be + retrieved. For example, column0, column1. Type: string (or Expression with + resultType string). + :type rfc_table_fields: object + :param rfc_table_options: The options for the filtering of the SAP Table. + For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with + resultType string). + :type rfc_table_options: object + :param batch_size: Specifies the maximum number of rows that will be + retrieved at a time when retrieving data from SAP Table. Type: integer (or + Expression with resultType integer). + :type batch_size: object + :param custom_rfc_read_table_function_module: Specifies the custom RFC + function module that will be used to read data from SAP Table. Type: + string (or Expression with resultType string). + :type custom_rfc_read_table_function_module: object + :param partition_option: The partition mechanism that will be used for SAP + table read in parallel. Possible values include: 'None', 'PartitionOnInt', + 'PartitionOnCalendarYear', 'PartitionOnCalendarMonth', + 'PartitionOnCalendarDate', 'PartitionOnTime' + :type partition_option: str or + ~azure.mgmt.datafactory.models.SapTablePartitionOption + :param partition_settings: The settings that will be leveraged for SAP + table source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.SapTablePartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'row_count': {'key': 'rowCount', 'type': 'object'}, + 'row_skips': {'key': 'rowSkips', 'type': 'object'}, + 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, + 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.row_count = row_count + self.row_skips = row_skips + self.rfc_table_fields = rfc_table_fields + self.rfc_table_options = rfc_table_options + self.batch_size = batch_size + self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module + self.partition_option = partition_option +
self.partition_settings = partition_settings + self.type = 'SapTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py index eaebfb4c2553..b9ea331b8c6e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py @@ -30,6 +30,9 @@ class ScheduleTrigger(MultiplePipelineTrigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -49,6 +52,7 @@ class ScheduleTrigger(MultiplePipelineTrigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py index 1fc148a81b29..f13f01c7fa13 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py @@ -30,6 +30,9 @@ class ScheduleTrigger(MultiplePipelineTrigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. 
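# A minimal sketch tying the SapTableSource completed above to
# SapTablePartitionSettings: a filtered, year-partitioned parallel read.
# The filter text, column name and sizes are illustrative only.
from azure.mgmt.datafactory.models import (SapTablePartitionSettings,
                                           SapTableSource)

sap_source = SapTableSource(
    rfc_table_options="LAND1 EQ 'US'",
    batch_size=10000,
    partition_option='PartitionOnCalendarYear',
    partition_settings=SapTablePartitionSettings(
        partition_column_name='GJAHR',
        partition_lower_bound='2010',
        partition_upper_bound='2019'))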
@@ -49,12 +52,13 @@ class ScheduleTrigger(MultiplePipelineTrigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, } - def __init__(self, *, recurrence, additional_properties=None, description: str=None, pipelines=None, **kwargs) -> None: - super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, pipelines=pipelines, **kwargs) + def __init__(self, *, recurrence, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) self.recurrence = recurrence self.type = 'ScheduleTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py index c433366826b8..4d42f575e769 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py @@ -29,7 +29,7 @@ class ServiceNowLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py index cdd9e8ebb718..b9d166f241d6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py @@ -29,7 +29,7 @@ class ServiceNowLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py index 00068f5e5d32..16b10bb8de5e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py @@ -27,6 +27,10 @@ class ServiceNowSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
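# A minimal sketch of the annotations parameter added to ScheduleTrigger
# above; the hourly recurrence and tag value are hypothetical.
from azure.mgmt.datafactory.models import (ScheduleTrigger,
                                           ScheduleTriggerRecurrence)

trigger = ScheduleTrigger(
    recurrence=ScheduleTriggerRecurrence(frequency='Hour', interval=1),
    annotations=['nightly-load'])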
+ :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class ServiceNowSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py index ffe72cb426e7..20d1a64d04d3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py @@ -27,6 +27,10 @@ class ServiceNowSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class ServiceNowSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'ServiceNowSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py new file mode 100644 index 000000000000..5b8fd4e42ba2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class SftpLocation(DatasetLocation): + """The location of SFTP dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SftpLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py new file mode 100644 index 000000000000..c5e2feafa971 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class SftpLocation(DatasetLocation): + """The location of SFTP dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(SftpLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py new file mode 100644 index 000000000000..5e7b4faf77ad --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings import StoreReadSettings + + +class SftpReadSettings(StoreReadSettings): + """Sftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Sftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
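# A minimal sketch of the SftpLocation defined above. Note the generated
# signature requires the type discriminator explicitly; the folder path and
# file name are placeholders.
from azure.mgmt.datafactory.models import SftpLocation

sftp_location = SftpLocation(type='SftpLocation',
                             folder_path='daily/exports',
                             file_name='data.csv')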
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SftpReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py new file mode 100644 index 000000000000..e6c27e3ad08a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class SftpReadSettings(StoreReadSettings): + """Sftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Sftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(SftpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py index 31a9d5524f36..aa4c535fc514 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py @@ -29,7 +29,7 @@ class SftpServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py index 581e8f2a0f8e..7decd7781348 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py @@ -29,7 +29,7 @@ class SftpServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
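A minimal construction sketch for the new SftpReadSettings model, mirroring the generated py3 signature above; the folder/file patterns and datetime window are illustrative values, not API constants:

from azure.mgmt.datafactory.models import SftpReadSettings

# 'type' is the required discriminator on StoreReadSettings subclasses;
# every other property below is an example value.
read_settings = SftpReadSettings(
    type='SftpReadSettings',
    recursive=True,
    wildcard_folder_path='landing/2019/*',           # hypothetical folder pattern
    wildcard_file_name='*.csv',                      # hypothetical file pattern
    modified_datetime_start='2019-08-01T00:00:00Z',
    modified_datetime_end='2019-08-31T00:00:00Z',
    max_concurrent_connections=4,
)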
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py index b57922620ef8..ee5311dceb7a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py @@ -29,7 +29,7 @@ class ShopifyLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py index 714de7f0ddf6..ea6189277552 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py @@ -29,7 +29,7 @@ class ShopifyLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py index 3006ede4633d..d4596976d459 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py @@ -27,6 +27,10 @@ class ShopifySource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class ShopifySource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py index ec17bdce3e35..6b56edd62904 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py @@ -27,6 +27,10 @@ class ShopifySource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
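The max_concurrent_connections property added across these copy sources follows one pattern; a hedged sketch using ShopifySource, with illustrative values:

from azure.mgmt.datafactory.models import ShopifySource

source = ShopifySource(
    query='Products',               # hypothetical query text
    source_retry_count=3,
    source_retry_wait='00:00:30',   # matches the documented timespan pattern
    max_concurrent_connections=2,   # cap parallel connections to the store
)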
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class ShopifySource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'ShopifySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py index 006311c492bb..4f9ab49a7bba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py @@ -29,7 +29,7 @@ class SparkLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py index c5e20deef8e8..f6433b6ab187 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py @@ -29,7 +29,7 @@ class SparkLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py index 8d1493ea9c7f..bdbdf067e1ea 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py @@ -43,9 +43,15 @@ class SparkObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Spark. Type: string (or Expression + with resultType string). + :type table: object + :param spark_object_dataset_schema: The schema name of the Spark. Type: + string (or Expression with resultType string). + :type spark_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class SparkObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'spark_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(SparkObjectDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.spark_object_dataset_schema = kwargs.get('spark_object_dataset_schema', None) self.type = 'SparkObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py index 3ab167dd3540..afe383951f1c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py @@ -43,9 +43,15 @@ class SparkObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Spark. Type: string (or Expression + with resultType string). + :type table: object + :param spark_object_dataset_schema: The schema name of the Spark. Type: + string (or Expression with resultType string). 
+ :type spark_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class SparkObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'spark_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, spark_object_dataset_schema=None, **kwargs) -> None: super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.spark_object_dataset_schema = spark_object_dataset_schema self.type = 'SparkObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py index 643a71610930..6d670c1c6b2a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py @@ -27,6 +27,10 @@ class SparkSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class SparkSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py index ede7f9ed5e2b..8da01b0cd823 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py @@ -27,6 +27,10 @@ class SparkSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
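With table_name retired in favor of the split schema + table properties, a sketch of the new SparkObjectDataset shape; the linked service and table names are placeholders:

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SparkObjectDataset,
)

dataset = SparkObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='SparkLS'),
    spark_object_dataset_schema='default',  # serialized as typeProperties.schema
    table='trips',                          # serialized as typeProperties.table
)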
:type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class SparkSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'SparkSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py index ac12b6e55e59..8fe57eaa3595 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py @@ -34,6 +34,10 @@ class SqlDWSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. Type: string (or Expression @@ -46,6 +50,10 @@ class SqlDWSink(CopySink): :param poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
+ :type table_option: object """ _validation = { @@ -58,10 +66,12 @@ class SqlDWSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__(self, **kwargs): @@ -69,4 +79,5 @@ def __init__(self, **kwargs): self.pre_copy_script = kwargs.get('pre_copy_script', None) self.allow_poly_base = kwargs.get('allow_poly_base', None) self.poly_base_settings = kwargs.get('poly_base_settings', None) + self.table_option = kwargs.get('table_option', None) self.type = 'SqlDWSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py index 2b2d44cf16c6..6f9241560e59 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py @@ -34,6 +34,10 @@ class SqlDWSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. Type: string (or Expression @@ -46,6 +50,10 @@ class SqlDWSink(CopySink): :param poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
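A construction sketch for the extended SqlDWSink; 'autoCreate' is the only table_option value the docstring allows, and the PolyBase settings shown are illustrative assumptions:

from azure.mgmt.datafactory.models import PolybaseSettings, SqlDWSink

sink = SqlDWSink(
    allow_poly_base=True,
    poly_base_settings=PolybaseSettings(reject_type='value', reject_value=0),
    table_option='autoCreate',     # create the sink table when it is missing
    max_concurrent_connections=4,
)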
+ :type table_option: object """ _validation = { @@ -58,15 +66,18 @@ class SqlDWSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, **kwargs) -> None: - super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, table_option=None, **kwargs) -> None: + super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.allow_poly_base = allow_poly_base self.poly_base_settings = poly_base_settings + self.table_option = table_option self.type = 'SqlDWSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py index aa3f88a75938..1a020672f7c2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py @@ -27,6 +27,10 @@ class SqlDWSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param sql_reader_query: SQL Data Warehouse reader query. 
Type: string (or @@ -51,6 +55,7 @@ class SqlDWSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py index b74c004141d1..ae8fe605024f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py @@ -27,6 +27,10 @@ class SqlDWSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or @@ -51,14 +55,15 @@ class SqlDWSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py new file mode 100644 index 000000000000..6a11990fc720 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SqlMISink(CopySink): + """A copy activity Azure SQL Managed Instance sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
+ :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlMISink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) + self.type = 'SqlMISink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py new file mode 100644 index 000000000000..16fe41cf47f7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SqlMISink(CopySink): + """A copy activity Azure SQL Managed Instance sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
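For the new SqlMISink, a minimal sketch of the stored-procedure write path; the procedure, table-type, and parameter names are hypothetical:

from azure.mgmt.datafactory.models import SqlMISink, StoredProcedureParameter

sink = SqlMISink(
    sql_writer_stored_procedure_name='spOverwriteTrips',  # hypothetical procedure
    sql_writer_table_type='TripTableType',                # hypothetical table type
    stored_procedure_table_type_parameter_name='Trips',
    stored_procedure_parameters={
        'Parameter1': StoredProcedureParameter(value='1', type='Int'),
    },
    table_option='autoCreate',
)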
+ :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option + self.type = 'SqlMISink' diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py new file mode 100644 index 000000000000..4d4db9b09281 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SqlMISource(CopySource): + """A copy activity Azure SQL Managed Instance source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + an Azure SQL Managed Instance source. This cannot be used at the same time + as SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce.
+ :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlMISource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.type = 'SqlMISource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py new file mode 100644 index 000000000000..952bc7b4da4f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SqlMISource(CopySource): + """A copy activity Azure SQL Managed Instance source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + an Azure SQL Managed Instance source. This cannot be used at the same time + as SqlReaderQuery. Type: string (or Expression with resultType string).
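The matching read side; per the docstring, sql_reader_query and sql_reader_stored_procedure_name cannot be used together, so this sketch picks the stored-procedure path (names are hypothetical):

from azure.mgmt.datafactory.models import SqlMISource, StoredProcedureParameter

source = SqlMISource(
    sql_reader_stored_procedure_name='spGetTrips',  # mutually exclusive with sql_reader_query
    stored_procedure_parameters={
        'Parameter1': StoredProcedureParameter(value='1', type='Int'),
    },
    max_concurrent_connections=2,
)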
+ :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'SqlMISource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py index 36230c046278..45d342212ea4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py @@ -29,7 +29,7 @@ class SqlServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py index fb446a12f601..3eb8c5063dc1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py @@ -29,7 +29,7 @@ class SqlServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. 
Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py new file mode 100644 index 000000000000..b3cbe492bbf2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
+ :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) + self.type = 'SqlServerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py new file mode 100644 index 000000000000..dd5daf2c5660 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = 
stored_procedure_table_type_parameter_name + self.table_option = table_option + self.type = 'SqlServerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py new file mode 100644 index 000000000000..f9aa011047ea --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SqlServerSource(CopySource): + """A copy activity SQL server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. 
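SqlServerSink carries the same surface as SqlMISink; a sketch of the simpler pre-copy-script path, with illustrative script text and batch size:

from azure.mgmt.datafactory.models import SqlServerSink

sink = SqlServerSink(
    pre_copy_script='TRUNCATE TABLE dbo.Staging',  # hypothetical script
    table_option='autoCreate',
    write_batch_size=10000,
)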
+ :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.type = 'SqlServerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py new file mode 100644 index 000000000000..27d12985e595 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SqlServerSource(CopySource): + """A copy activity SQL server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). 
+ :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'SqlServerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py index c0bc0b66a5e2..3998671ee8ae 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py @@ -43,15 +43,20 @@ class SqlServerTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the SQL Server dataset. - Type: string (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param sql_server_table_dataset_schema: The schema name of the SQL Server + dataset. Type: string (or Expression with resultType string). + :type sql_server_table_dataset_schema: object + :param table: The table name of the SQL Server dataset. Type: string (or + Expression with resultType string). 
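The new source knobs compose like this; a minimal sketch (procedure and parameter names below are illustrative, not part of this change):

    from azure.mgmt.datafactory.models import (
        SqlServerSource, StoredProcedureParameter)

    # Stored-procedure read; per the docstring this is mutually exclusive
    # with sql_reader_query.
    source = SqlServerSource(
        sql_reader_stored_procedure_name='spGetOrders',  # hypothetical proc
        stored_procedure_parameters={
            'stringData': StoredProcedureParameter(value='str3'),
            'identifier': StoredProcedureParameter(value='3', type='Int'),
        },
        max_concurrent_connections=2,  # new in this release
    )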
+ :type table: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -65,9 +70,13 @@ class SqlServerTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'sql_server_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__(self, **kwargs): super(SqlServerTableDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.sql_server_table_dataset_schema = kwargs.get('sql_server_table_dataset_schema', None) + self.table = kwargs.get('table', None) self.type = 'SqlServerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py index 0fb8d10ea111..989780c9bfda 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py @@ -43,15 +43,20 @@ class SqlServerTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the SQL Server dataset. - Type: string (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param sql_server_table_dataset_schema: The schema name of the SQL Server + dataset. Type: string (or Expression with resultType string). + :type sql_server_table_dataset_schema: object + :param table: The table name of the SQL Server dataset. Type: string (or + Expression with resultType string). 
+ :type table: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -65,9 +70,13 @@ class SqlServerTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'sql_server_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, sql_server_table_dataset_schema=None, table=None, **kwargs) -> None: super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.sql_server_table_dataset_schema = sql_server_table_dataset_schema + self.table = table self.type = 'SqlServerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py index 77692817100d..3a81c5f7ea2f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py @@ -34,6 +34,10 @@ class SqlSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. @@ -48,6 +52,14 @@ class SqlSink(CopySink): :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
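With the required flag dropped from table_name, datasets can move to the split schema/table form added here; a sketch (linked-service and table names are placeholders):

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SqlServerTableDataset)

    dataset = SqlServerTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='SqlServerLS'),
        sql_server_table_dataset_schema='dbo',  # maps to typeProperties.schema
        table='Orders',                         # maps to typeProperties.table
    )
    # Payloads that still carry only typeProperties.tableName keep
    # deserializing, since the 'required': True entry was removed above.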
+ :type table_option: object """ _validation = { @@ -60,11 +72,14 @@ class SqlSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__(self, **kwargs): @@ -73,4 +88,6 @@ def __init__(self, **kwargs): self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) self.pre_copy_script = kwargs.get('pre_copy_script', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) self.type = 'SqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py index 5aa68f696f16..d33810d9abef 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py @@ -34,6 +34,10 @@ class SqlSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. @@ -48,6 +52,14 @@ class SqlSink(CopySink): :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
+ :type table_option: object """ _validation = { @@ -60,17 +72,22 @@ class SqlSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option self.type = 'SqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py index 3f374b19f072..bb31474b1f7c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py @@ -27,6 +27,10 @@ class SqlSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param sql_reader_query: SQL reader query. 
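For context, a sketch of how the sink additions surface inside a copy activity; the activity, dataset, and procedure names are placeholders and the surrounding pipeline plumbing is omitted:

    from azure.mgmt.datafactory.models import (
        CopyActivity, DatasetReference, SqlSink, SqlSource)

    copy = CopyActivity(
        name='CopyOrders',
        inputs=[DatasetReference(reference_name='OrdersSourceDataset')],
        outputs=[DatasetReference(reference_name='OrdersSinkDataset')],
        source=SqlSource(sql_reader_query='SELECT * FROM dbo.Orders'),
        sink=SqlSink(
            # Feed batches to the writer proc via a table-valued parameter.
            sql_writer_stored_procedure_name='spUpsertOrders',
            sql_writer_table_type='OrdersTableType',
            stored_procedure_table_type_parameter_name='Orders',
            table_option='autoCreate',  # only supported value, per the docs
            max_concurrent_connections=4,
        ),
    )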
Type: string (or Expression @@ -50,6 +54,7 @@ class SqlSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py index ff39b6768a9f..dcad458fd4a6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py @@ -27,6 +27,10 @@ class SqlSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param sql_reader_query: SQL reader query. Type: string (or Expression @@ -50,14 +54,15 @@ class SqlSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py index 4e9df2b68e62..4edfc8b211f7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py @@ -29,7 +29,7 @@ class SquareLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py index 0b9218efba97..40719f600a18 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py @@ -29,7 +29,7 @@ class SquareLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py index 919abc0b19fa..f083df43f13a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py @@ -27,6 +27,10 @@ class SquareSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class SquareSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py index f7ba625398af..ec8a741d564c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py @@ -27,6 +27,10 @@ class SquareSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or
@@ -42,11 +46,12 @@ class SquareSource(CopySource):
         'additional_properties': {'key': '', 'type': '{object}'},
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'query': {'key': 'query', 'type': 'object'},
     }
 
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None:
-        super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs)
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
         self.query = query
         self.type = 'SquareSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py
new file mode 100644
index 000000000000..63512fdec4d8
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py
@@ -0,0 +1,44 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class SSISAccessCredential(Model):
+    """SSIS access credential.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param domain: Required. Domain for Windows authentication.
+    :type domain: object
+    :param user_name: Required. UserName for Windows authentication.
+    :type user_name: object
+    :param password: Required. Password for Windows authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecureString
+    """
+
+    _validation = {
+        'domain': {'required': True},
+        'user_name': {'required': True},
+        'password': {'required': True},
+    }
+
+    _attribute_map = {
+        'domain': {'key': 'domain', 'type': 'object'},
+        'user_name': {'key': 'userName', 'type': 'object'},
+        'password': {'key': 'password', 'type': 'SecureString'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SSISAccessCredential, self).__init__(**kwargs)
+        self.domain = kwargs.get('domain', None)
+        self.user_name = kwargs.get('user_name', None)
+        self.password = kwargs.get('password', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py
new file mode 100644
index 000000000000..5df0fc8941da
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py
@@ -0,0 +1,44 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class SSISAccessCredential(Model):
+    """SSIS access credential.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param domain: Required. Domain for Windows authentication.
+    :type domain: object
+    :param user_name: Required. UserName for Windows authentication.
+    :type user_name: object
+    :param password: Required. Password for Windows authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecureString
+    """
+
+    _validation = {
+        'domain': {'required': True},
+        'user_name': {'required': True},
+        'password': {'required': True},
+    }
+
+    _attribute_map = {
+        'domain': {'key': 'domain', 'type': 'object'},
+        'user_name': {'key': 'userName', 'type': 'object'},
+        'password': {'key': 'password', 'type': 'SecureString'},
+    }
+
+    def __init__(self, *, domain, user_name, password, **kwargs) -> None:
+        super(SSISAccessCredential, self).__init__(**kwargs)
+        self.domain = domain
+        self.user_name = user_name
+        self.password = password
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py
new file mode 100644
index 000000000000..5dff9764e2a2
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py
@@ -0,0 +1,51 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .ssis_object_metadata import SsisObjectMetadata
+
+
+class SsisEnvironment(SsisObjectMetadata):
+    """Ssis environment.
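Construction sketch for the new credential model (the domain, account, and password values are placeholders):

    from azure.mgmt.datafactory.models import SSISAccessCredential, SecureString

    credential = SSISAccessCredential(
        domain='CORP',
        user_name='svc-ssis',
        # The attribute map types this as SecureString, not a plain str.
        password=SecureString(value='<password>'),
    )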
+ + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains environment. + :type folder_id: long + :param variables: Variable in environment + :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, + } + + def __init__(self, **kwargs): + super(SsisEnvironment, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.variables = kwargs.get('variables', None) + self.type = 'Environment' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py new file mode 100644 index 000000000000..43697ba62146 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisEnvironment(SsisObjectMetadata): + """Ssis environment. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains environment. 
+ :type folder_id: long + :param variables: Variable in environment + :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, variables=None, **kwargs) -> None: + super(SsisEnvironment, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.variables = variables + self.type = 'Environment' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py new file mode 100644 index 000000000000..e7d31d369392 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisEnvironmentReference(Model): + """Ssis environment reference. + + :param id: Environment reference id. + :type id: long + :param environment_folder_name: Environment folder name. + :type environment_folder_name: str + :param environment_name: Environment name. + :type environment_name: str + :param reference_type: Reference type + :type reference_type: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'}, + 'environment_name': {'key': 'environmentName', 'type': 'str'}, + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisEnvironmentReference, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.environment_folder_name = kwargs.get('environment_folder_name', None) + self.environment_name = kwargs.get('environment_name', None) + self.reference_type = kwargs.get('reference_type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py new file mode 100644 index 000000000000..14cbfca99d4f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
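These metadata models are filled in from service responses rather than built by callers; a read-side sketch, assuming `env` is an SsisEnvironment returned by the service:

    # Only fields shown in this diff are used here.
    print('environment', env.name, 'in folder', env.folder_id)
    for variable in env.variables or []:
        print('  variable:', variable)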
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisEnvironmentReference(Model): + """Ssis environment reference. + + :param id: Environment reference id. + :type id: long + :param environment_folder_name: Environment folder name. + :type environment_folder_name: str + :param environment_name: Environment name. + :type environment_name: str + :param reference_type: Reference type + :type reference_type: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'}, + 'environment_name': {'key': 'environmentName', 'type': 'str'}, + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, environment_folder_name: str=None, environment_name: str=None, reference_type: str=None, **kwargs) -> None: + super(SsisEnvironmentReference, self).__init__(**kwargs) + self.id = id + self.environment_folder_name = environment_folder_name + self.environment_name = environment_name + self.reference_type = reference_type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py new file mode 100644 index 000000000000..350b0d92852b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata import SsisObjectMetadata + + +class SsisFolder(SsisObjectMetadata): + """Ssis folder. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisFolder, self).__init__(**kwargs) + self.type = 'Folder' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py new file mode 100644 index 000000000000..d6483fda2c08 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisFolder(SsisObjectMetadata): + """Ssis folder. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: + super(SsisFolder, self).__init__(id=id, name=name, description=description, **kwargs) + self.type = 'Folder' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py new file mode 100644 index 000000000000..cfdebe717541 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISLogLocation(Model): + """SSIS package execution log location. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param log_path: Required. The SSIS package execution log path. Type: + string (or Expression with resultType string). + :type log_path: object + :ivar type: Required. The type of SSIS log location. Default value: "File" + . + :vartype type: str + :param access_credential: The package execution log access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param log_refresh_interval: Specifies the interval to refresh log. The + default interval is 5 minutes. Type: string (or Expression with resultType + string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type log_refresh_interval: object + """ + + _validation = { + 'log_path': {'required': True}, + 'type': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'log_path': {'key': 'logPath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, + } + + type = "File" + + def __init__(self, **kwargs): + super(SSISLogLocation, self).__init__(**kwargs) + self.log_path = kwargs.get('log_path', None) + self.access_credential = kwargs.get('access_credential', None) + self.log_refresh_interval = kwargs.get('log_refresh_interval', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py new file mode 100644 index 000000000000..de4fbe35dcb3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISLogLocation(Model): + """SSIS package execution log location. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param log_path: Required. The SSIS package execution log path. Type: + string (or Expression with resultType string). + :type log_path: object + :ivar type: Required. The type of SSIS log location. Default value: "File" + . + :vartype type: str + :param access_credential: The package execution log access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param log_refresh_interval: Specifies the interval to refresh log. The + default interval is 5 minutes. Type: string (or Expression with resultType + string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
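Usage sketch for the log location (the UNC path and credential are placeholders; note that `type` is a class-level constant and is not passed to __init__):

    from azure.mgmt.datafactory.models import (
        SSISAccessCredential, SSISLogLocation, SecureString)

    log_location = SSISLogLocation(
        log_path='\\\\fileshare\\ssis\\logs',
        access_credential=SSISAccessCredential(
            domain='CORP', user_name='svc-ssis',
            password=SecureString(value='<password>')),
        # Matches the timespan pattern above: refresh every 10 minutes.
        log_refresh_interval='00:10:00',
    )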
+ :type log_refresh_interval: object + """ + + _validation = { + 'log_path': {'required': True}, + 'type': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'log_path': {'key': 'logPath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, + } + + type = "File" + + def __init__(self, *, log_path, access_credential=None, log_refresh_interval=None, **kwargs) -> None: + super(SSISLogLocation, self).__init__(**kwargs) + self.log_path = log_path + self.access_credential = access_credential + self.log_refresh_interval = log_refresh_interval diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py index ed7940124645..811075137f41 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py @@ -15,6 +15,9 @@ class SsisObjectMetadata(Model): """SSIS object metadata. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder + All required parameters must be populated in order to send to Azure. :param id: Metadata id. @@ -38,6 +41,10 @@ class SsisObjectMetadata(Model): 'type': {'key': 'type', 'type': 'str'}, } + _subtype_map = { + 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} + } + def __init__(self, **kwargs): super(SsisObjectMetadata, self).__init__(**kwargs) self.id = kwargs.get('id', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py index b7373e36523c..45f7e15af4fa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py @@ -15,6 +15,9 @@ class SsisObjectMetadata(Model): """SSIS object metadata. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder + All required parameters must be populated in order to send to Azure. :param id: Metadata id. @@ -38,6 +41,10 @@ class SsisObjectMetadata(Model): 'type': {'key': 'type', 'type': 'str'}, } + _subtype_map = { + 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} + } + def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: super(SsisObjectMetadata, self).__init__(**kwargs) self.id = id diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py new file mode 100644 index 000000000000..b04fc1138797 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
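Because of the new _subtype_map entries, SsisObjectMetadata payloads deserialize to their concrete subclasses, so callers can branch with isinstance; a sketch where `items` stands for a metadata listing obtained from the service:

    from azure.mgmt.datafactory.models import (
        SsisEnvironment, SsisFolder, SsisPackage, SsisProject)

    for item in items:
        if isinstance(item, SsisFolder):
            print('folder', item.name)
        elif isinstance(item, SsisPackage):
            print('package', item.name, 'in project', item.project_id)
        elif isinstance(item, (SsisEnvironment, SsisProject)):
            print(item.type, item.name)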
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata import SsisObjectMetadata + + +class SsisPackage(SsisObjectMetadata): + """Ssis Package. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains package. + :type folder_id: long + :param project_version: Project version which contains package. + :type project_version: long + :param project_id: Project id which contains package. + :type project_id: long + :param parameters: Parameters in package + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'project_version': {'key': 'projectVersion', 'type': 'long'}, + 'project_id': {'key': 'projectId', 'type': 'long'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, **kwargs): + super(SsisPackage, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.project_version = kwargs.get('project_version', None) + self.project_id = kwargs.get('project_id', None) + self.parameters = kwargs.get('parameters', None) + self.type = 'Package' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py index 81a17eb8fe53..248d0aa9b8ae 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py @@ -20,6 +20,17 @@ class SSISPackageLocation(Model): :param package_path: Required. The SSIS package path. Type: string (or Expression with resultType string). :type package_path: object + :param type: The type of SSIS package location. Possible values include: + 'SSISDB', 'File' + :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType + :param package_password: Password of the package. + :type package_password: ~azure.mgmt.datafactory.models.SecureString + :param access_credential: The package access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param configuration_path: The configuration file of the package + execution. Type: string (or Expression with resultType string). 
+ :type configuration_path: object """ _validation = { @@ -28,8 +39,16 @@ class SSISPackageLocation(Model): _attribute_map = { 'package_path': {'key': 'packagePath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecureString'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, } def __init__(self, **kwargs): super(SSISPackageLocation, self).__init__(**kwargs) self.package_path = kwargs.get('package_path', None) + self.type = kwargs.get('type', None) + self.package_password = kwargs.get('package_password', None) + self.access_credential = kwargs.get('access_credential', None) + self.configuration_path = kwargs.get('configuration_path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py index af139da47d88..cc442d8d35b8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py @@ -20,6 +20,17 @@ class SSISPackageLocation(Model): :param package_path: Required. The SSIS package path. Type: string (or Expression with resultType string). :type package_path: object + :param type: The type of SSIS package location. Possible values include: + 'SSISDB', 'File' + :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType + :param package_password: Password of the package. + :type package_password: ~azure.mgmt.datafactory.models.SecureString + :param access_credential: The package access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param configuration_path: The configuration file of the package + execution. Type: string (or Expression with resultType string). + :type configuration_path: object """ _validation = { @@ -28,8 +39,16 @@ class SSISPackageLocation(Model): _attribute_map = { 'package_path': {'key': 'packagePath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecureString'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, } - def __init__(self, *, package_path, **kwargs) -> None: + def __init__(self, *, package_path, type=None, package_password=None, access_credential=None, configuration_path=None, **kwargs) -> None: super(SSISPackageLocation, self).__init__(**kwargs) self.package_path = package_path + self.type = type + self.package_password = package_password + self.access_credential = access_credential + self.configuration_path = configuration_path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py new file mode 100644 index 000000000000..e1e932e97ae6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
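Sketch of a file-system package location using the new fields (paths and secrets are placeholders):

    from azure.mgmt.datafactory.models import (
        SSISAccessCredential, SSISPackageLocation, SecureString)

    package_location = SSISPackageLocation(
        package_path='\\\\fileshare\\ssis\\Package.dtsx',
        type='File',  # new: 'SSISDB' or 'File'
        package_password=SecureString(value='<package password>'),
        access_credential=SSISAccessCredential(
            domain='CORP', user_name='svc-ssis',
            password=SecureString(value='<password>')),
        configuration_path='\\\\fileshare\\ssis\\Package.dtsConfig',
    )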
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisPackage(SsisObjectMetadata): + """Ssis Package. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains package. + :type folder_id: long + :param project_version: Project version which contains package. + :type project_version: long + :param project_id: Project id which contains package. + :type project_id: long + :param parameters: Parameters in package + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'project_version': {'key': 'projectVersion', 'type': 'long'}, + 'project_id': {'key': 'projectId', 'type': 'long'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, project_version: int=None, project_id: int=None, parameters=None, **kwargs) -> None: + super(SsisPackage, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.project_version = project_version + self.project_id = project_id + self.parameters = parameters + self.type = 'Package' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py new file mode 100644 index 000000000000..c456af0bab48 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisParameter(Model): + """Ssis parameter. + + :param id: Parameter id. + :type id: long + :param name: Parameter name. + :type name: str + :param description: Parameter description. + :type description: str + :param data_type: Parameter type. + :type data_type: str + :param required: Whether parameter is required. + :type required: bool + :param sensitive: Whether parameter is sensitive. + :type sensitive: bool + :param design_default_value: Design default value of parameter. + :type design_default_value: str + :param default_value: Default value of parameter. 
+ :type default_value: str + :param sensitive_default_value: Default sensitive value of parameter. + :type sensitive_default_value: str + :param value_type: Parameter value type. + :type value_type: str + :param value_set: Parameter value set. + :type value_set: bool + :param variable: Parameter reference variable. + :type variable: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'required': {'key': 'required', 'type': 'bool'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'str'}, + 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, + 'value_type': {'key': 'valueType', 'type': 'str'}, + 'value_set': {'key': 'valueSet', 'type': 'bool'}, + 'variable': {'key': 'variable', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisParameter, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.data_type = kwargs.get('data_type', None) + self.required = kwargs.get('required', None) + self.sensitive = kwargs.get('sensitive', None) + self.design_default_value = kwargs.get('design_default_value', None) + self.default_value = kwargs.get('default_value', None) + self.sensitive_default_value = kwargs.get('sensitive_default_value', None) + self.value_type = kwargs.get('value_type', None) + self.value_set = kwargs.get('value_set', None) + self.variable = kwargs.get('variable', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py new file mode 100644 index 000000000000..6a4ff73768f0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisParameter(Model): + """Ssis parameter. + + :param id: Parameter id. + :type id: long + :param name: Parameter name. + :type name: str + :param description: Parameter description. + :type description: str + :param data_type: Parameter type. + :type data_type: str + :param required: Whether parameter is required. + :type required: bool + :param sensitive: Whether parameter is sensitive. + :type sensitive: bool + :param design_default_value: Design default value of parameter. + :type design_default_value: str + :param default_value: Default value of parameter. + :type default_value: str + :param sensitive_default_value: Default sensitive value of parameter. + :type sensitive_default_value: str + :param value_type: Parameter value type. + :type value_type: str + :param value_set: Parameter value set. + :type value_set: bool + :param variable: Parameter reference variable. 
+ :type variable: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'required': {'key': 'required', 'type': 'bool'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'str'}, + 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, + 'value_type': {'key': 'valueType', 'type': 'str'}, + 'value_set': {'key': 'valueSet', 'type': 'bool'}, + 'variable': {'key': 'variable', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, required: bool=None, sensitive: bool=None, design_default_value: str=None, default_value: str=None, sensitive_default_value: str=None, value_type: str=None, value_set: bool=None, variable: str=None, **kwargs) -> None: + super(SsisParameter, self).__init__(**kwargs) + self.id = id + self.name = name + self.description = description + self.data_type = data_type + self.required = required + self.sensitive = sensitive + self.design_default_value = design_default_value + self.default_value = default_value + self.sensitive_default_value = sensitive_default_value + self.value_type = value_type + self.value_set = value_set + self.variable = variable diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py new file mode 100644 index 000000000000..c29a36fb628e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata import SsisObjectMetadata + + +class SsisProject(SsisObjectMetadata): + """Ssis project. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains project. + :type folder_id: long + :param version: Project version. 
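A short sketch of the sensitive/non-sensitive split on SsisParameter, with assumed field values: default_value carries the plain default, while sensitive_default_value applies when sensitive is true:

from azure.mgmt.datafactory.models import SsisParameter

param = SsisParameter(
    name='SourcePassword',                 # assumed parameter name
    data_type='string',
    required=True,
    sensitive=True,                        # plain default_value is unused here
    sensitive_default_value='<redacted>',  # placeholder
)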
+ :type version: long + :param environment_refs: Environment reference in project + :type environment_refs: + list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :param parameters: Parameters in project + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'version': {'key': 'version', 'type': 'long'}, + 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, **kwargs): + super(SsisProject, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.version = kwargs.get('version', None) + self.environment_refs = kwargs.get('environment_refs', None) + self.parameters = kwargs.get('parameters', None) + self.type = 'Project' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py new file mode 100644 index 000000000000..11b95a644e2f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisProject(SsisObjectMetadata): + """Ssis project. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains project. + :type folder_id: long + :param version: Project version. 
+ :type version: long + :param environment_refs: Environment reference in project + :type environment_refs: + list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :param parameters: Parameters in project + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'version': {'key': 'version', 'type': 'long'}, + 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, version: int=None, environment_refs=None, parameters=None, **kwargs) -> None: + super(SsisProject, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.version = version + self.environment_refs = environment_refs + self.parameters = parameters + self.type = 'Project' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py new file mode 100644 index 000000000000..73fda3b27967 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisVariable(Model): + """Ssis variable. + + :param id: Variable id. + :type id: long + :param name: Variable name. + :type name: str + :param description: Variable description. + :type description: str + :param data_type: Variable type. + :type data_type: str + :param sensitive: Whether variable is sensitive. + :type sensitive: bool + :param value: Variable value. + :type value: str + :param sensitive_value: Variable sensitive value. 
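Likewise, a minimal illustrative construction of the new SsisProject metadata model (names and ids are placeholders; real instances are returned by the service):

from azure.mgmt.datafactory.models import SsisProject

proj = SsisProject(name='WarehouseETL', folder_id=2, version=5)
print(proj.type)  # 'Project' -- constant filled by the subclass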
+ :type sensitive_value: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'value': {'key': 'value', 'type': 'str'}, + 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisVariable, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.data_type = kwargs.get('data_type', None) + self.sensitive = kwargs.get('sensitive', None) + self.value = kwargs.get('value', None) + self.sensitive_value = kwargs.get('sensitive_value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py new file mode 100644 index 000000000000..e709842ff465 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisVariable(Model): + """Ssis variable. + + :param id: Variable id. + :type id: long + :param name: Variable name. + :type name: str + :param description: Variable description. + :type description: str + :param data_type: Variable type. + :type data_type: str + :param sensitive: Whether variable is sensitive. + :type sensitive: bool + :param value: Variable value. + :type value: str + :param sensitive_value: Variable sensitive value. + :type sensitive_value: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'value': {'key': 'value', 'type': 'str'}, + 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, sensitive: bool=None, value: str=None, sensitive_value: str=None, **kwargs) -> None: + super(SsisVariable, self).__init__(**kwargs) + self.id = id + self.name = name + self.description = description + self.data_type = data_type + self.sensitive = sensitive + self.value = value + self.sensitive_value = sensitive_value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py new file mode 100644 index 000000000000..c12c0ce8860d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
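And the matching sketch for SsisVariable; note that value is carried as a string regardless of data_type (values below are illustrative):

from azure.mgmt.datafactory.models import SsisVariable

var = SsisVariable(name='RowCount', data_type='Int32', value='0', sensitive=False)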
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class StoreReadSettings(Model): + """Connector read setting. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(StoreReadSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py new file mode 100644 index 000000000000..e2026fd52b93 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class StoreReadSettings(Model): + """Connector read setting. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, **kwargs) -> None: + super(StoreReadSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.max_concurrent_connections = max_concurrent_connections diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py new file mode 100644 index 000000000000..728b8cdd8c89 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class StoreWriteSettings(Model): + """Connector write settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: FileServerWriteSettings, AzureDataLakeStoreWriteSettings, + AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. 
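A rough sketch of the new StoreReadSettings base model above. Its 'type' is a plain required string naming the concrete read-settings kind; the 'AzureBlobStorageReadSettings' value below is an assumption for illustration, and maxConcurrentConnections accepts either a literal integer or a Data Factory expression object:

from azure.mgmt.datafactory.models import StoreReadSettings

settings = StoreReadSettings(
    type='AzureBlobStorageReadSettings',  # assumed type name, for illustration
    max_concurrent_connections=4,         # could also be an Expression object
)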
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'FileServerWriteSettings': 'FileServerWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings'} + } + + def __init__(self, **kwargs): + super(StoreWriteSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.copy_behavior = kwargs.get('copy_behavior', None) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py new file mode 100644 index 000000000000..7cce5d205541 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class StoreWriteSettings(Model): + """Connector write settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: FileServerWriteSettings, AzureDataLakeStoreWriteSettings, + AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'FileServerWriteSettings': 'FileServerWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings'} + } + + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(StoreWriteSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.max_concurrent_connections = max_concurrent_connections + self.copy_behavior = copy_behavior + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py index 748cf7cba53c..ff16595aa8c7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py @@ -19,7 +19,7 @@ class StoredProcedureParameter(Model): Expression with resultType string). :type value: object :param type: Stored procedure parameter type. Possible values include: - 'String', 'Int', 'Decimal', 'Guid', 'Boolean', 'Date' + 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' :type type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType """ diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py index bd967ce52876..2842ef9ae35c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py @@ -19,7 +19,7 @@ class StoredProcedureParameter(Model): Expression with resultType string). :type value: object :param type: Stored procedure parameter type. Possible values include: - 'String', 'Int', 'Decimal', 'Guid', 'Boolean', 'Date' + 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' :type type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType """ diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py index 634b4268bdb5..83de0e6f61f2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py @@ -29,7 +29,7 @@ class SybaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
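The StoredProcedureParameter change above simply widens the accepted type values; a one-line sketch passing a value that would overflow the existing 'Int' type (the literal is illustrative):

from azure.mgmt.datafactory.models import StoredProcedureParameter

p = StoredProcedureParameter(value='9223372036854775807', type='Int64')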
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py index 59b20a5f73cd..5b6cc0ce6ded 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py @@ -29,7 +29,7 @@ class SybaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py new file mode 100644 index 000000000000..02f89a8fca25 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SybaseSource(CopySource): + """A copy activity source for Sybase databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SybaseSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'SybaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py new file mode 100644 index 000000000000..c11e96174349 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SybaseSource(CopySource): + """A copy activity source for Sybase databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SybaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py new file mode 100644 index 000000000000..ff2dfd5471fb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SybaseTableDataset(Dataset): + """The Sybase table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Sybase table name. Type: string (or Expression with + resultType string). 
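A minimal sketch of the new SybaseSource in use (the query is a placeholder); the type discriminator is set by the constructor:

from azure.mgmt.datafactory.models import SybaseSource

src = SybaseSource(
    query='select * from dbo.Orders',  # placeholder query
    max_concurrent_connections=2,
)
assert src.type == 'SybaseSource'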
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SybaseTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'SybaseTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py new file mode 100644 index 000000000000..88e9d3c287fe --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SybaseTableDataset(Dataset): + """The Sybase table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Sybase table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(SybaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SybaseTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py index b3847d7dd9f4..6e02b0d389ab 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py @@ -29,12 +29,15 @@ class TeradataLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). + :param connection_string: Teradata ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Server name for connection. Type: string (or Expression + with resultType string). :type server: object :param authentication_type: AuthenticationType to be used for connection. 
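A matching sketch for the new SybaseTableDataset; the linked-service name is an assumption for illustration:

from azure.mgmt.datafactory.models import LinkedServiceReference, SybaseTableDataset

ds = SybaseTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='SybaseLS'),  # assumed name
    table_name='dbo.Orders',  # placeholder table
)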
Possible values include: 'Basic', 'Windows' @@ -53,7 +56,6 @@ class TeradataLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'server': {'required': True}, } _attribute_map = { @@ -63,6 +65,7 @@ class TeradataLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, @@ -72,6 +75,7 @@ class TeradataLinkedService(LinkedService): def __init__(self, **kwargs): super(TeradataLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) self.server = kwargs.get('server', None) self.authentication_type = kwargs.get('authentication_type', None) self.username = kwargs.get('username', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py index 236741422023..aac40efe69e0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py @@ -29,12 +29,15 @@ class TeradataLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). + :param connection_string: Teradata ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Server name for connection. Type: string (or Expression + with resultType string). :type server: object :param authentication_type: AuthenticationType to be used for connection. 
Possible values include: 'Basic', 'Windows' @@ -53,7 +56,6 @@ class TeradataLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'server': {'required': True}, } _attribute_map = { @@ -63,6 +65,7 @@ class TeradataLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, @@ -70,8 +73,9 @@ class TeradataLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, server=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string self.server = server self.authentication_type = authentication_type self.username = username diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py new file mode 100644 index 000000000000..0f9c023f9553 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TeradataPartitionSettings(Model): + """The settings that will be leveraged for teradata source partitioning. + + :param partition_column_name: The name of the column that will be used for + proceeding range or hash partitioning. Type: string (or Expression with + resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). 
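Since 'server' is no longer required on the Teradata linked service above, the service can now be defined from an ODBC connection string alone; a brief sketch with a placeholder string:

from azure.mgmt.datafactory.models import TeradataLinkedService

ls = TeradataLinkedService(
    connection_string='DBCName=teradata.example.com;Uid=loader;',  # placeholder
)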
+ :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TeradataPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py new file mode 100644 index 000000000000..04824e614ff2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TeradataPartitionSettings(Model): + """The settings that will be leveraged for teradata source partitioning. + + :param partition_column_name: The name of the column that will be used for + proceeding range or hash partitioning. Type: string (or Expression with + resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: + super(TeradataPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py new file mode 100644 index 000000000000..81d1c8e202c1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class TeradataSource(CopySource): + """A copy activity Teradata source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Teradata query. Type: string (or Expression with resultType + string). + :type query: object + :param partition_option: The partition mechanism that will be used for + teradata read in parallel. Possible values include: 'None', 'Hash', + 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.TeradataPartitionOption + :param partition_settings: The settings that will be leveraged for + teradata source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.TeradataPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, + } + + def __init__(self, **kwargs): + super(TeradataSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + self.type = 'TeradataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py new file mode 100644 index 000000000000..79d8ccb01f14 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class TeradataSource(CopySource): + """A copy activity Teradata source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Teradata query. Type: string (or Expression with resultType + string). + :type query: object + :param partition_option: The partition mechanism that will be used for + teradata read in parallel. Possible values include: 'None', 'Hash', + 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.TeradataPartitionOption + :param partition_settings: The settings that will be leveraged for + teradata source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.TeradataPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.partition_option = partition_option + self.partition_settings = partition_settings + self.type = 'TeradataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py new file mode 100644 index 000000000000..e396bfd6fb15 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
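Tying the two new Teradata models together, a brief sketch of a dynamic-range partitioned read; the query, column name, and bounds are all illustrative:

from azure.mgmt.datafactory.models import (
    TeradataPartitionSettings,
    TeradataSource,
)

src = TeradataSource(
    query='select * from Sales.Orders',  # illustrative query
    partition_option='DynamicRange',
    partition_settings=TeradataPartitionSettings(
        partition_column_name='OrderId',  # bounds apply to this column
        partition_lower_bound='1',
        partition_upper_bound='1000000',
    ),
)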
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class TeradataTableDataset(Dataset): + """The Teradata database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param database: The database name of Teradata. Type: string (or + Expression with resultType string). + :type database: object + :param table: The table name of Teradata. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TeradataTableDataset, self).__init__(**kwargs) + self.database = kwargs.get('database', None) + self.table = kwargs.get('table', None) + self.type = 'TeradataTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py new file mode 100644 index 000000000000..892707b7f133 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
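For reference, a minimal sketch of the dataset model defined above; LinkedServiceReference(reference_name=...) is the standard reference shape in this package, and the linked service, database and table names are placeholders.

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    TeradataTableDataset,
)

dataset = TeradataTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='TeradataLS'),
    database='Sales',    # serialized as typeProperties.database
    table='Orders',      # serialized as typeProperties.table
)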
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class TeradataTableDataset(Dataset): + """The Teradata database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param database: The database name of Teradata. Type: string (or + Expression with resultType string). + :type database: object + :param table: The table name of Teradata. Type: string (or Expression with + resultType string). 
+ :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, database=None, table=None, **kwargs) -> None: + super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.database = database + self.table = table + self.type = 'TeradataTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py index 398402178ae4..728ffc32bcb5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py @@ -35,6 +35,9 @@ class Trigger(Model): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str """ @@ -48,6 +51,7 @@ class Trigger(Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, } @@ -60,4 +64,5 @@ def __init__(self, **kwargs): self.additional_properties = kwargs.get('additional_properties', None) self.description = kwargs.get('description', None) self.runtime_state = None + self.annotations = kwargs.get('annotations', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py index 09fb39534be1..862973544ab4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py @@ -35,6 +35,9 @@ class Trigger(Model): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str """ @@ -48,6 +51,7 @@ class Trigger(Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, } @@ -55,9 +59,10 @@ class Trigger(Model): 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} } - def __init__(self, *, additional_properties=None, description: str=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: super(Trigger, self).__init__(**kwargs) self.additional_properties = additional_properties self.description = description self.runtime_state = None + self.annotations = annotations self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status.py new file mode 100644 index 000000000000..6a581e757840 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerSubscriptionOperationStatus(Model): + """Defines the response of a trigger subscription operation. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar trigger_name: Trigger name. + :vartype trigger_name: str + :ivar status: Event Subscription Status. Possible values include: + 'Enabled', 'Provisioning', 'Deprovisioning', 'Disabled', 'Unknown' + :vartype status: str or + ~azure.mgmt.datafactory.models.EventSubscriptionStatus + """ + + _validation = { + 'trigger_name': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'trigger_name': {'key': 'triggerName', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) + self.trigger_name = None + self.status = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status_py3.py new file mode 100644 index 000000000000..40ead4c50fe4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
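Both fields of TriggerSubscriptionOperationStatus are read-only, so instances only come back from the service. A sketch of reading one, assuming the trigger event-subscription methods added in this release are named as below (verify against triggers_operations.py); client stands for an authenticated DataFactoryManagementClient, and all resource names are placeholders.

# Hypothetical method name for the event-subscription status call.
status = client.triggers.get_event_subscription_status(
    'myResourceGroup', 'myFactory', 'myBlobEventsTrigger')
print(status.trigger_name, status.status)  # both populated by the server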
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerSubscriptionOperationStatus(Model): + """Defines the response of a trigger subscription operation. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar trigger_name: Trigger name. + :vartype trigger_name: str + :ivar status: Event Subscription Status. Possible values include: + 'Enabled', 'Provisioning', 'Deprovisioning', 'Disabled', 'Unknown' + :vartype status: str or + ~azure.mgmt.datafactory.models.EventSubscriptionStatus + """ + + _validation = { + 'trigger_name': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'trigger_name': {'key': 'triggerName', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) + self.trigger_name = None + self.status = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py index ce46a4aac7e2..939624ae5042 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py @@ -32,6 +32,9 @@ class TumblingWindowTrigger(Trigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipeline: Required. Pipeline for which runs are created when an @@ -82,6 +85,7 @@ class TumblingWindowTrigger(Trigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py index bc3114f08edd..6856629c8b91 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py @@ -32,6 +32,9 @@ class TumblingWindowTrigger(Trigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipeline: Required. 
Pipeline for which runs are created when an @@ -82,6 +85,7 @@ class TumblingWindowTrigger(Trigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, @@ -94,8 +98,8 @@ class TumblingWindowTrigger(Trigger): 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, } - def __init__(self, *, pipeline, frequency, interval: int, start_time, max_concurrency: int, additional_properties=None, description: str=None, end_time=None, delay=None, retry_policy=None, depends_on=None, **kwargs) -> None: - super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + def __init__(self, *, pipeline, frequency, interval: int, start_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, end_time=None, delay=None, retry_policy=None, depends_on=None, **kwargs) -> None: + super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.pipeline = pipeline self.frequency = frequency self.interval = interval diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py new file mode 100644 index 000000000000..0d92d32c12b0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class ValidationActivity(ControlActivity): + """This activity verifies that an external resource exists. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param timeout: Specifies the timeout for the activity to run. If there is + no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 + week as default. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
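Because annotations now lives on the Trigger base class, every trigger subclass threads it through to super(), as the TumblingWindowTrigger constructor change above shows. A minimal sketch (pipeline name, times and tags are placeholders):

from datetime import datetime
from azure.mgmt.datafactory.models import (
    PipelineReference,
    TriggerPipelineReference,
    TumblingWindowTrigger,
)

trigger = TumblingWindowTrigger(
    pipeline=TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='CopyPipeline')),
    frequency='Hour',
    interval=1,
    start_time=datetime(2019, 9, 1),
    max_concurrency=10,
    annotations=['team:data-eng', 'env:prod'],  # new in this release
)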
+ :type timeout: object + :param sleep: A delay in seconds between validation attempts. If no value + is specified, 10 seconds will be used as the default. Type: integer (or + Expression with resultType integer). + :type sleep: object + :param minimum_size: Can be used if dataset points to a file. The file + must be greater than or equal in size to the value specified. Type: + integer (or Expression with resultType integer). + :type minimum_size: object + :param child_items: Can be used if dataset points to a folder. If set to + true, the folder must have at least one file. If set to false, the folder + must be empty. Type: boolean (or Expression with resultType boolean). + :type child_items: object + :param dataset: Required. Validation activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, + 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, + 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, + 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + } + + def __init__(self, **kwargs): + super(ValidationActivity, self).__init__(**kwargs) + self.timeout = kwargs.get('timeout', None) + self.sleep = kwargs.get('sleep', None) + self.minimum_size = kwargs.get('minimum_size', None) + self.child_items = kwargs.get('child_items', None) + self.dataset = kwargs.get('dataset', None) + self.type = 'Validation' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py new file mode 100644 index 000000000000..f4680400b447 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class ValidationActivity(ControlActivity): + """This activity verifies that an external resource exists. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
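A minimal sketch of the ValidationActivity defined above, blocking until a file lands; the dataset name is a placeholder and the timeout string follows the d.hh:mm:ss pattern from the docstring.

from azure.mgmt.datafactory.models import DatasetReference, ValidationActivity

check = ValidationActivity(
    name='WaitForInputFile',
    dataset=DatasetReference(reference_name='InputBlobDataset'),
    timeout='0.00:10:00',  # give up after 10 minutes
    sleep=30,              # re-check every 30 seconds
    minimum_size=1,        # the file must be at least 1 byte
)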
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param timeout: Specifies the timeout for the activity to run. If there is + no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 + week as default. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: object + :param sleep: A delay in seconds between validation attempts. If no value + is specified, 10 seconds will be used as the default. Type: integer (or + Expression with resultType integer). + :type sleep: object + :param minimum_size: Can be used if dataset points to a file. The file + must be greater than or equal in size to the value specified. Type: + integer (or Expression with resultType integer). + :type minimum_size: object + :param child_items: Can be used if dataset points to a folder. If set to + true, the folder must have at least one file. If set to false, the folder + must be empty. Type: boolean (or Expression with resultType boolean). + :type child_items: object + :param dataset: Required. Validation activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, + 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, + 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, + 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + } + + def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, sleep=None, minimum_size=None, child_items=None, **kwargs) -> None: + super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.timeout = timeout + self.sleep = sleep + self.minimum_size = minimum_size + self.child_items = child_items + self.dataset = dataset + self.type = 'Validation' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py index fafba164a752..6b5e8d0103f5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py @@ -29,7 +29,7 @@ class VerticaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. 
:type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py index 77caf915eaab..3aee3a5ae0f6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py @@ -29,7 +29,7 @@ class VerticaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py index 1670c0e9fc49..d0b642f15d38 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py @@ -27,6 +27,10 @@ class VerticaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class VerticaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py index 6be2edd35218..a1c4d755f2b4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py @@ -27,6 +27,10 @@ class VerticaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
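The max_concurrent_connections property added throughout this release behaves identically on every CopySource and CopySink subclass, so one sketch with VerticaSource stands in for all of them (the query is a placeholder).

from azure.mgmt.datafactory.models import VerticaSource

source = VerticaSource(
    query='SELECT * FROM public.events',
    max_concurrent_connections=8,   # cap parallel connections to the store
    source_retry_count=3,
    source_retry_wait='0.00:00:30',
)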
Type: string (or @@ -42,11 +46,12 @@ class VerticaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'VerticaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py index e84465f8ba07..151a0d000e3f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py @@ -43,9 +43,15 @@ class VerticaTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Vertica. Type: string (or Expression + with resultType string). + :type table: object + :param vertica_table_dataset_schema: The schema name of the Vertica. Type: + string (or Expression with resultType string). 
+ :type vertica_table_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class VerticaTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'vertica_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(VerticaTableDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.vertica_table_dataset_schema = kwargs.get('vertica_table_dataset_schema', None) self.type = 'VerticaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py index 87d69bb9a443..4c2fc8da32ad 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py @@ -43,9 +43,15 @@ class VerticaTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Vertica. Type: string (or Expression + with resultType string). + :type table: object + :param vertica_table_dataset_schema: The schema name of the Vertica. Type: + string (or Expression with resultType string). 
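The Python attribute is prefixed as vertica_table_dataset_schema, presumably to avoid clashing with the inherited Dataset.schema property; on the wire it serializes to plain typeProperties.schema, as the attribute map above shows. A sketch of migrating off the retired table_name (names are placeholders):

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    VerticaTableDataset,
)

# Previously: VerticaTableDataset(..., table_name='public.events')
dataset = VerticaTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='VerticaLS'),
    vertica_table_dataset_schema='public',  # typeProperties.schema
    table='events',                         # typeProperties.table
)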
+ :type vertica_table_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class VerticaTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'vertica_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, vertica_table_dataset_schema=None, **kwargs) -> None: super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.vertica_table_dataset_schema = vertica_table_dataset_schema self.type = 'VerticaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py new file mode 100644 index 000000000000..1c648c42c3e2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class WebHookActivity(ControlActivity): + """WebHook activity. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :ivar method: Required. Rest API method for target endpoint. Default + value: "POST" . + :vartype method: str + :param url: Required. WebHook activity target endpoint and path. Type: + string (or Expression with resultType string). + :type url: object + :param timeout: The timeout within which the webhook should be called + back. If there is no value specified, it defaults to 10 minutes. Type: + string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type timeout: str + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). + :type body: object + :param authentication: Authentication method used for calling the + endpoint. + :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True, 'constant': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, + } + + method = "POST" + + def __init__(self, **kwargs): + super(WebHookActivity, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.timeout = kwargs.get('timeout', None) + self.headers = kwargs.get('headers', None) + self.body = kwargs.get('body', None) + self.authentication = kwargs.get('authentication', None) + self.type = 'WebHook' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py new file mode 100644 index 000000000000..40cdc6f732da --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class WebHookActivity(ControlActivity): + """WebHook activity. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
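Since method is a class constant pinned to "POST", callers only supply the remaining properties. A minimal sketch (endpoint and payload are placeholders):

from azure.mgmt.datafactory.models import WebHookActivity

hook = WebHookActivity(
    name='NotifyDownstream',
    url='https://example.invalid/api/callback',  # placeholder endpoint
    timeout='0.00:05:00',        # wait up to 5 minutes for the callback
    headers={'Content-Type': 'application/json'},
    body='{"status": "ready"}',  # required because the method is POST
)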
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :ivar method: Required. Rest API method for target endpoint. Default + value: "POST" . + :vartype method: str + :param url: Required. WebHook activity target endpoint and path. Type: + string (or Expression with resultType string). + :type url: object + :param timeout: The timeout within which the webhook should be called + back. If there is no value specified, it defaults to 10 minutes. Type: + string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: str + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). + :type body: object + :param authentication: Authentication method used for calling the + endpoint. + :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True, 'constant': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, + } + + method = "POST" + + def __init__(self, *, name: str, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout: str=None, headers=None, body=None, authentication=None, **kwargs) -> None: + super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.url = url + self.timeout = timeout + self.headers = headers + self.body = body + self.authentication = authentication + self.type = 'WebHook' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py index cee3bd37409c..18fadba3f3ee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py @@ -29,7 +29,7 @@ class WebLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of 
tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py index 3afa3a1bcb05..3e491b0fac4d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py @@ -29,7 +29,7 @@ class WebLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py index 13bcbfbb62d7..c5d3a2a8f00a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py @@ -27,6 +27,10 @@ class WebSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str """ @@ -39,6 +43,7 @@ class WebSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py index 7c5ce29d3d26..684e1d4233cc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py @@ -27,6 +27,10 @@ class WebSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str """ @@ -39,9 +43,10 @@ class WebSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, **kwargs) -> None: - super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'WebSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py index e9daa4ff7d2a..24973f577133 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py @@ -29,7 +29,7 @@ class XeroLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py index eb665519f4ea..433c65ade739 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py @@ -29,7 +29,7 @@ class XeroLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py index 4695780bf41b..a37852a5b419 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py @@ -27,6 +27,10 @@ class XeroSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,6 +46,7 @@ class XeroSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py index 8de950856bae..bbee6c6fa1f0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py @@ -27,6 +27,10 @@ class XeroSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class XeroSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'XeroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py index 997efb5fc242..fe34dff77ea9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py @@ -29,7 +29,7 @@ class ZohoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py index c05d018146d6..f82f6221592b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py @@ -29,7 +29,7 @@ class ZohoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py index 248d50d55297..274c6fc09f19 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py @@ -27,6 +27,10 @@ class ZohoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class ZohoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py index 5f0547d9465a..6d7dc29bdf8a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py @@ -27,6 +27,10 @@ class ZohoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class ZohoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'ZohoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py index ffc98f67bed2..826179f5fb63 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py @@ -21,8 +21,8 @@ from .pipeline_runs_operations import PipelineRunsOperations from .activity_runs_operations import ActivityRunsOperations from .triggers_operations import TriggersOperations -from .rerun_triggers_operations import RerunTriggersOperations from .trigger_runs_operations import TriggerRunsOperations +from .rerun_triggers_operations import RerunTriggersOperations __all__ = [ 'Operations', @@ -37,6 +37,6 @@ 'PipelineRunsOperations', 'ActivityRunsOperations', 'TriggersOperations', - 'RerunTriggersOperations', 'TriggerRunsOperations', + 'RerunTriggersOperations', ] diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py index 080e8c87ba18..4a648d96586c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py @@ -105,3 +105,75 @@ def get_feature_value( return deserialized get_feature_value.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue'} + + def get_feature_value_by_factory( + self, resource_group_name, factory_name, feature_name=None, feature_type=None, custom_headers=None, raw=False, **operation_config): + """Get exposure control feature for specific factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param feature_name: The feature name. + :type feature_name: str + :param feature_type: The feature type. 
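Unlike the location-scoped get_feature_value, this factory-scoped overload assembles the ExposureControlRequest from the two optional keyword arguments, as the body construction below shows. A usage sketch, assuming ExposureControlResponse exposes feature_name and value, with placeholder names throughout; client is an authenticated DataFactoryManagementClient.

response = client.exposure_control.get_feature_value_by_factory(
    'myResourceGroup', 'myFactory',
    feature_name='ADFIntegrationRuntimeSharingRbac',  # placeholder feature
    feature_type='Feature',
)
print(response.feature_name, response.value)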
+ :type feature_type: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: ExposureControlResponse or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) + + # Construct URL + url = self.get_feature_value_by_factory.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ExposureControlResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue'} diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py index 8a01ce6a8408..343396e705ac 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py @@ -312,7 +312,7 @@ def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} def create_run( - self, resource_group_name, factory_name, pipeline_name, reference_pipeline_run_id=None, parameters=None, custom_headers=None, raw=False, 
**operation_config): + self, resource_group_name, factory_name, pipeline_name, reference_pipeline_run_id=None, is_recovery=None, start_activity_name=None, parameters=None, custom_headers=None, raw=False, **operation_config): """Creates a run of a pipeline. :param resource_group_name: The resource group name. @@ -325,6 +325,13 @@ def create_run( ID is specified the parameters of the specified run will be used to create a new run. :type reference_pipeline_run_id: str + :param is_recovery: Recovery mode flag. If recovery mode is set to + true, the specified referenced pipeline run and the new run will be + grouped under the same groupId. + :type is_recovery: bool + :param start_activity_name: In recovery mode, the rerun will start + from this activity. If not specified, all activities will run. + :type start_activity_name: str :param parameters: Parameters of the pipeline run. These parameters will be used only if the runId is not specified. :type parameters: dict[str, object] @@ -353,6 +360,10 @@ def create_run( query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') if reference_pipeline_run_id is not None: query_parameters['referencePipelineRunId'] = self._serialize.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str') + if is_recovery is not None: + query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool') + if start_activity_name is not None: + query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str') # Construct headers header_parameters = {} diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py index 51e9b0ac37a3..e4e4774ae3bc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py @@ -37,6 +37,65 @@ def __init__(self, client, config, serializer, deserializer): self.config = config + def rerun( + self, resource_group_name, factory_name, trigger_name, run_id, custom_headers=None, raw=False, **operation_config): + """Rerun single trigger instance by runId. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param run_id: The pipeline run identifier. + :type run_id: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.rerun.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'runId': self._serialize.url("run_id", run_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + rerun.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun'} + def query_by_factory( self, resource_group_name, factory_name, filter_parameters, custom_headers=None, raw=False, **operation_config): """Query trigger runs. 
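Taken together, the two additions above give a recovery workflow: ``create_run`` can now resume a failed pipeline run from a chosen activity, and ``TriggerRunsOperations.rerun`` re-fires a single trigger instance. A minimal sketch of the expected calling pattern follows; the credential setup and every id/name in it are hypothetical placeholders, not part of this change::

    from azure.common.credentials import ServicePrincipalCredentials
    from azure.mgmt.datafactory import DataFactoryManagementClient

    # All ids and names below are placeholders.
    credentials = ServicePrincipalCredentials(
        client_id='app-id', secret='app-secret', tenant='tenant-id')
    client = DataFactoryManagementClient(credentials, 'subscription-id')

    # Re-run a failed pipeline in recovery mode, resuming from one activity;
    # the new run is grouped with the referenced run under the same groupId.
    run = client.pipelines.create_run(
        'my-rg', 'my-factory', 'my-pipeline',
        reference_pipeline_run_id='failed-run-id',
        is_recovery=True,
        start_activity_name='CopyActivity1')  # omit to re-run all activities
    print(run.run_id)

    # Re-fire a single trigger instance by its run id (returns None on success).
    client.trigger_runs.rerun('my-rg', 'my-factory', 'my-trigger', 'trigger-run-id')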
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py
index f80cfcb2870b..d6a2d51cf85a 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py
@@ -316,6 +316,269 @@ def delete(
     delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'}
 
+    def _subscribe_to_events_initial(
+            self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config):
+        # Construct URL
+        url = self.subscribe_to_events.metadata['url']
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
+            'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$')
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+        if self.config.generate_client_request_id:
+            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
+        if custom_headers:
+            header_parameters.update(custom_headers)
+        if self.config.accept_language is not None:
+            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
+
+        # Construct and send request
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)
+
+        if response.status_code not in [200, 202]:
+            exp = CloudError(response)
+            exp.request_id = response.headers.get('x-ms-request-id')
+            raise exp
+
+        deserialized = None
+
+        if response.status_code == 200:
+            deserialized = self._deserialize('TriggerSubscriptionOperationStatus', response)
+
+        if raw:
+            client_raw_response = ClientRawResponse(deserialized, response)
+            return client_raw_response
+
+        return deserialized
+
+    def subscribe_to_events(
+            self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, polling=True, **operation_config):
+        """Subscribe event trigger to events.
+
+        :param resource_group_name: The resource group name.
+        :type resource_group_name: str
+        :param factory_name: The factory name.
+        :type factory_name: str
+        :param trigger_name: The trigger name.
+        :type trigger_name: str
+        :param dict custom_headers: headers that will be added to the request
+        :param bool raw: The poller return type is ClientRawResponse, the
+         direct response alongside the deserialized response
+        :param polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :return: An instance of LROPoller that returns
+         TriggerSubscriptionOperationStatus or
+         ClientRawResponse<TriggerSubscriptionOperationStatus> if raw==True
+        :rtype:
+         ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus]
+         or
+         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus]]
+        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
+        """
+        raw_result = self._subscribe_to_events_initial(
+            resource_group_name=resource_group_name,
+            factory_name=factory_name,
+            trigger_name=trigger_name,
+            custom_headers=custom_headers,
+            raw=True,
+            **operation_config
+        )
+
+        def get_long_running_output(response):
+            deserialized = self._deserialize('TriggerSubscriptionOperationStatus', response)
+
+            if raw:
+                client_raw_response = ClientRawResponse(deserialized, response)
+                return client_raw_response
+
+            return deserialized
+
+        lro_delay = operation_config.get(
+            'long_running_operation_timeout',
+            self.config.long_running_operation_timeout)
+        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    subscribe_to_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'}
+
+    def get_event_subscription_status(
+            self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config):
+        """Get a trigger's event subscription status.
+
+        :param resource_group_name: The resource group name.
+        :type resource_group_name: str
+        :param factory_name: The factory name.
+        :type factory_name: str
+        :param trigger_name: The trigger name.
+        :type trigger_name: str
+        :param dict custom_headers: headers that will be added to the request
+        :param bool raw: returns the direct response alongside the
+         deserialized response
+        :param operation_config: :ref:`Operation configuration
+         overrides<msrest:optionsforoperations>`.
+        :return: TriggerSubscriptionOperationStatus or ClientRawResponse if
+         raw=true
+        :rtype:
+         ~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus or
+         ~msrest.pipeline.ClientRawResponse
+        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
+        """
+        # Construct URL
+        url = self.get_event_subscription_status.metadata['url']
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
+            'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$')
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+        if self.config.generate_client_request_id:
+            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
+        if custom_headers:
+            header_parameters.update(custom_headers)
+        if self.config.accept_language is not None:
+            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
+
+        # Construct and send request
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)
+
+        if response.status_code not in [200]:
+            exp = CloudError(response)
+            exp.request_id = response.headers.get('x-ms-request-id')
+            raise exp
+
+        deserialized = None
+
+        if response.status_code == 200:
+            deserialized = self._deserialize('TriggerSubscriptionOperationStatus', response)
+
+        if raw:
+            client_raw_response = ClientRawResponse(deserialized, response)
+            return client_raw_response
+
+        return deserialized
+    get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'}
+
+
+    def _unsubscribe_from_events_initial(
+            self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config):
+        # Construct URL
+        url = self.unsubscribe_from_events.metadata['url']
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
+            'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$')
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+        if self.config.generate_client_request_id:
+            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
+        if custom_headers:
+            header_parameters.update(custom_headers)
+        if self.config.accept_language is not None:
+            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
+
+        # Construct and send request
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)
+
+        if response.status_code not in [200, 202]:
+            exp = CloudError(response)
+            exp.request_id = response.headers.get('x-ms-request-id')
+            raise exp
+
+        deserialized = None
+
+        if response.status_code == 200:
+            deserialized = self._deserialize('TriggerSubscriptionOperationStatus', response)
+
+        if raw:
+            client_raw_response = ClientRawResponse(deserialized, response)
+            return client_raw_response
+
+        return deserialized
+
+    def unsubscribe_from_events(
+            self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, polling=True, **operation_config):
+        """Unsubscribe event trigger from events.
+
+        :param resource_group_name: The resource group name.
+        :type resource_group_name: str
+        :param factory_name: The factory name.
+        :type factory_name: str
+        :param trigger_name: The trigger name.
+        :type trigger_name: str
+        :param dict custom_headers: headers that will be added to the request
+        :param bool raw: The poller return type is ClientRawResponse, the
+         direct response alongside the deserialized response
+        :param polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :return: An instance of LROPoller that returns
+         TriggerSubscriptionOperationStatus or
+         ClientRawResponse<TriggerSubscriptionOperationStatus> if raw==True
+        :rtype:
+         ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus]
+         or
+         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus]]
+        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
+        """
+        raw_result = self._unsubscribe_from_events_initial(
+            resource_group_name=resource_group_name,
+            factory_name=factory_name,
+            trigger_name=trigger_name,
+            custom_headers=custom_headers,
+            raw=True,
+            **operation_config
+        )
+
+        def get_long_running_output(response):
+            deserialized = self._deserialize('TriggerSubscriptionOperationStatus', response)
+
+            if raw:
+                client_raw_response = ClientRawResponse(deserialized, response)
+                return client_raw_response
+
+            return deserialized
+
+        lro_delay = operation_config.get(
+            'long_running_operation_timeout',
+            self.config.long_running_operation_timeout)
+        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    unsubscribe_from_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'}
+
+
     def _start_initial(
             self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config):
         # Construct URL
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py
index 981739e4ff95..e4f3d5055303 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py
@@ -9,5 +9,5 @@
 # regenerated.
 # --------------------------------------------------------------------------
 
-VERSION = "0.7.0"
+VERSION = "0.8.0"
diff --git a/sdk/datafactory/azure-mgmt-datafactory/setup.py b/sdk/datafactory/azure-mgmt-datafactory/setup.py
index 4b3ca4777aca..301b3319927e 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/setup.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/setup.py
@@ -53,6 +53,7 @@
     version=version,
     description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
     long_description=readme + '\n\n' + history,
+    long_description_content_type='text/x-rst',
    license='MIT License',
     author='Microsoft Corporation',
     author_email='azpysdkhelp@microsoft.com',
@@ -63,7 +64,6 @@
         'Programming Language :: Python :: 2',
         'Programming Language :: Python :: 2.7',
         'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.4',
         'Programming Language :: Python :: 3.5',
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
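The three new trigger methods pair up: ``subscribe_to_events`` and ``unsubscribe_from_events`` are long-running operations that return an ``LROPoller``, while ``get_event_subscription_status`` is a plain synchronous call. A sketch of the expected usage, assuming a ``DataFactoryManagementClient`` instance ``client`` as in the earlier example (all resource names remain placeholders)::

    # Subscribe an event trigger to its event source and block until done.
    poller = client.triggers.subscribe_to_events(
        'my-rg', 'my-factory', 'my-event-trigger')
    status = poller.result()  # TriggerSubscriptionOperationStatus
    print(status.status)      # e.g. 'Enabled' once the subscription is active

    # Check the subscription state at any time without polling.
    status = client.triggers.get_event_subscription_status(
        'my-rg', 'my-factory', 'my-event-trigger')

    # Tear the subscription down; also a long-running operation.
    client.triggers.unsubscribe_from_events(
        'my-rg', 'my-factory', 'my-event-trigger').result()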