From cf20f44f91e05cdca86960430934189961faf0f1 Mon Sep 17 00:00:00 2001
From: Azure SDK for Python bot
Date: Fri, 7 Jun 2019 08:58:31 -0700
Subject: [PATCH] [AutoPR datafactory/resource-manager] [Datafactory] ADLS Gen 2 support for HDI BYOC and vNet support for HDI on demand (#5663)

* Generated from e4bd3471cedb625a2d65c1045f8d13f532f3f945

ADLS Gen 2 support for HDI BYOC and vNet support for HDI on demand

* Packaging update of azure-mgmt-datafactory
---
.../azure-mgmt-datafactory/MANIFEST.in | 1 + .../azure/mgmt/datafactory/models/__init__.py | 137 ++++++++++++++++++ .../models/amazon_mws_linked_service.py | 2 +- .../models/amazon_mws_linked_service_py3.py | 2 +- .../datafactory/models/amazon_mws_source.py | 5 + .../models/amazon_mws_source_py3.py | 9 +- .../models/amazon_redshift_linked_service.py | 2 +- .../amazon_redshift_linked_service_py3.py | 2 +- .../models/amazon_redshift_source.py | 5 + .../models/amazon_redshift_source_py3.py | 9 +- .../datafactory/models/amazon_s3_dataset.py | 10 ++ .../models/amazon_s3_dataset_py3.py | 12 +- .../models/amazon_s3_linked_service.py | 9 +- .../models/amazon_s3_linked_service_py3.py | 11 +- .../models/azure_batch_linked_service.py | 2 +- .../models/azure_batch_linked_service_py3.py | 2 +- .../datafactory/models/azure_blob_dataset.py | 10 ++ .../models/azure_blob_dataset_py3.py | 12 +- .../models/azure_blob_fs_dataset.py | 85 +++++++++++ .../models/azure_blob_fs_dataset_py3.py | 85 +++++++++++ .../models/azure_blob_fs_linked_service.py | 86 +++++++++++ .../azure_blob_fs_linked_service_py3.py | 86 +++++++++++ .../datafactory/models/azure_blob_fs_sink.py | 67 +++++++++ .../models/azure_blob_fs_sink_py3.py | 67 +++++++++ .../models/azure_blob_fs_source.py | 68 +++++++++ .../models/azure_blob_fs_source_py3.py | 68 +++++++++ .../azure_blob_storage_linked_service.py | 2 +- .../azure_blob_storage_linked_service_py3.py | 2 +- .../azure_data_explorer_linked_service.py | 86 +++++++++++ .../azure_data_explorer_linked_service_py3.py | 86 +++++++++++ .../models/azure_data_explorer_sink.py | 76 ++++++++++ .../models/azure_data_explorer_sink_py3.py | 76 ++++++++++ .../models/azure_data_explorer_source.py | 70 +++++++++ .../models/azure_data_explorer_source_py3.py | 70 +++++++++ .../azure_data_explorer_table_dataset.py | 72 +++++++++ .../azure_data_explorer_table_dataset_py3.py | 72 +++++++++ ...zure_data_lake_analytics_linked_service.py | 2 +- ..._data_lake_analytics_linked_service_py3.py | 2 +- .../models/azure_data_lake_store_dataset.py | 5 +- .../azure_data_lake_store_dataset_py3.py | 7 +- .../azure_data_lake_store_linked_service.py | 2 +- ...zure_data_lake_store_linked_service_py3.py | 2 +- .../models/azure_data_lake_store_sink.py | 5 + .../models/azure_data_lake_store_sink_py3.py | 9 +- .../models/azure_data_lake_store_source.py | 5 + .../azure_data_lake_store_source_py3.py | 9 +- .../models/azure_databricks_linked_service.py | 18 ++- .../azure_databricks_linked_service_py3.py | 20 ++- .../models/azure_function_linked_service.py | 2 +- .../azure_function_linked_service_py3.py | 2 +- .../models/azure_key_vault_linked_service.py | 2 +- .../azure_key_vault_linked_service_py3.py | 2 +- .../models/azure_ml_linked_service.py | 2 +- .../models/azure_ml_linked_service_py3.py | 2 +- .../models/azure_my_sql_linked_service.py | 2 +- .../models/azure_my_sql_linked_service_py3.py | 2 +- .../datafactory/models/azure_my_sql_source.py | 5 + .../models/azure_my_sql_source_py3.py | 9 +- .../azure_postgre_sql_linked_service.py | 2 +- .../azure_postgre_sql_linked_service_py3.py |
2 +- .../models/azure_postgre_sql_source.py | 5 + .../models/azure_postgre_sql_source_py3.py | 9 +- .../datafactory/models/azure_queue_sink.py | 5 + .../models/azure_queue_sink_py3.py | 9 +- .../models/azure_search_index_sink.py | 5 + .../models/azure_search_index_sink_py3.py | 9 +- .../models/azure_search_linked_service.py | 2 +- .../models/azure_search_linked_service_py3.py | 2 +- .../azure_sql_database_linked_service.py | 2 +- .../azure_sql_database_linked_service_py3.py | 2 +- .../models/azure_sql_dw_linked_service.py | 2 +- .../models/azure_sql_dw_linked_service_py3.py | 2 +- .../models/azure_storage_linked_service.py | 2 +- .../azure_storage_linked_service_py3.py | 2 +- .../datafactory/models/azure_table_sink.py | 5 + .../models/azure_table_sink_py3.py | 9 +- .../datafactory/models/azure_table_source.py | 5 + .../models/azure_table_source_py3.py | 9 +- .../azure_table_storage_linked_service.py | 2 +- .../azure_table_storage_linked_service_py3.py | 2 +- .../datafactory/models/blob_events_trigger.py | 4 + .../models/blob_events_trigger_py3.py | 8 +- .../mgmt/datafactory/models/blob_sink.py | 5 + .../mgmt/datafactory/models/blob_sink_py3.py | 9 +- .../mgmt/datafactory/models/blob_source.py | 5 + .../datafactory/models/blob_source_py3.py | 9 +- .../mgmt/datafactory/models/blob_trigger.py | 4 + .../datafactory/models/blob_trigger_py3.py | 8 +- .../models/cassandra_linked_service.py | 2 +- .../models/cassandra_linked_service_py3.py | 2 +- .../datafactory/models/cassandra_source.py | 5 + .../models/cassandra_source_py3.py | 9 +- .../models/concur_linked_service.py | 2 +- .../models/concur_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/concur_source.py | 5 + .../datafactory/models/concur_source_py3.py | 9 +- .../datafactory/models/control_activity.py | 8 +- .../models/control_activity_py3.py | 8 +- .../mgmt/datafactory/models/copy_activity.py | 4 + .../datafactory/models/copy_activity_py3.py | 6 +- .../mgmt/datafactory/models/copy_sink.py | 17 ++- .../mgmt/datafactory/models/copy_sink_py3.py | 19 ++- .../mgmt/datafactory/models/copy_source.py | 24 ++- .../datafactory/models/copy_source_py3.py | 26 ++-- .../models/cosmos_db_linked_service.py | 2 +- .../models/cosmos_db_linked_service_py3.py | 2 +- ...smos_db_mongo_db_api_collection_dataset.py | 73 ++++++++++ ..._db_mongo_db_api_collection_dataset_py3.py | 73 ++++++++++ .../cosmos_db_mongo_db_api_linked_service.py | 67 +++++++++ ...smos_db_mongo_db_api_linked_service_py3.py | 67 +++++++++ .../models/cosmos_db_mongo_db_api_sink.py | 68 +++++++++ .../models/cosmos_db_mongo_db_api_sink_py3.py | 68 +++++++++ .../models/cosmos_db_mongo_db_api_source.py | 71 +++++++++ .../cosmos_db_mongo_db_api_source_py3.py | 71 +++++++++ .../models/couchbase_linked_service.py | 2 +- .../models/couchbase_linked_service_py3.py | 2 +- .../datafactory/models/couchbase_source.py | 5 + .../models/couchbase_source_py3.py | 9 +- .../datafactory/models/custom_activity.py | 5 + .../datafactory/models/custom_activity_py3.py | 7 +- .../custom_data_source_linked_service.py | 2 +- .../custom_data_source_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/custom_dataset.py | 3 +- .../datafactory/models/custom_dataset_py3.py | 5 +- .../data_factory_management_client_enums.py | 30 ++++ .../azure/mgmt/datafactory/models/dataset.py | 41 +++--- .../mgmt/datafactory/models/dataset_py3.py | 41 +++--- .../datafactory/models/db2_linked_service.py | 2 +- .../models/db2_linked_service_py3.py | 2 +- .../models/document_db_collection_sink.py | 5 + 
.../models/document_db_collection_sink_py3.py | 9 +- .../models/document_db_collection_source.py | 5 + .../document_db_collection_source_py3.py | 9 +- .../models/drill_linked_service.py | 2 +- .../models/drill_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/drill_source.py | 5 + .../datafactory/models/drill_source_py3.py | 9 +- .../models/dynamics_ax_linked_service.py | 93 ++++++++++++ .../models/dynamics_ax_linked_service_py3.py | 93 ++++++++++++ .../models/dynamics_ax_resource_dataset.py | 73 ++++++++++ .../dynamics_ax_resource_dataset_py3.py | 73 ++++++++++ .../datafactory/models/dynamics_ax_source.py | 57 ++++++++ .../models/dynamics_ax_source_py3.py | 57 ++++++++ .../models/dynamics_linked_service.py | 2 +- .../models/dynamics_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/dynamics_sink.py | 5 + .../datafactory/models/dynamics_sink_py3.py | 9 +- .../datafactory/models/dynamics_source.py | 5 + .../datafactory/models/dynamics_source_py3.py | 9 +- .../models/eloqua_linked_service.py | 2 +- .../models/eloqua_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/eloqua_source.py | 5 + .../datafactory/models/eloqua_source_py3.py | 9 +- .../models/file_server_linked_service.py | 2 +- .../models/file_server_linked_service_py3.py | 2 +- .../datafactory/models/file_share_dataset.py | 10 ++ .../models/file_share_dataset_py3.py | 12 +- .../datafactory/models/file_system_sink.py | 5 + .../models/file_system_sink_py3.py | 9 +- .../datafactory/models/file_system_source.py | 5 + .../models/file_system_source_py3.py | 9 +- .../models/ftp_server_linked_service.py | 2 +- .../models/ftp_server_linked_service_py3.py | 2 +- .../models/google_ad_words_linked_service.py | 119 +++++++++++++++ .../google_ad_words_linked_service_py3.py | 119 +++++++++++++++ .../models/google_ad_words_object_dataset.py | 72 +++++++++ .../google_ad_words_object_dataset_py3.py | 72 +++++++++ .../models/google_ad_words_source.py | 57 ++++++++ .../models/google_ad_words_source_py3.py | 57 ++++++++ .../models/google_big_query_linked_service.py | 2 +- .../google_big_query_linked_service_py3.py | 2 +- .../models/google_big_query_source.py | 5 + .../models/google_big_query_source_py3.py | 9 +- .../models/greenplum_linked_service.py | 2 +- .../models/greenplum_linked_service_py3.py | 2 +- .../datafactory/models/greenplum_source.py | 5 + .../models/greenplum_source_py3.py | 9 +- .../models/hbase_linked_service.py | 2 +- .../models/hbase_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/hbase_source.py | 5 + .../datafactory/models/hbase_source_py3.py | 9 +- .../models/hd_insight_linked_service.py | 8 +- .../models/hd_insight_linked_service_py3.py | 10 +- .../hd_insight_on_demand_linked_service.py | 14 +- ...hd_insight_on_demand_linked_service_py3.py | 16 +- .../datafactory/models/hdfs_linked_service.py | 2 +- .../models/hdfs_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/hdfs_source.py | 5 + .../datafactory/models/hdfs_source_py3.py | 9 +- .../datafactory/models/hive_linked_service.py | 2 +- .../models/hive_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/hive_source.py | 5 + .../datafactory/models/hive_source_py3.py | 9 +- .../datafactory/models/http_linked_service.py | 2 +- .../models/http_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/http_source.py | 5 + .../datafactory/models/http_source_py3.py | 9 +- .../models/hubspot_linked_service.py | 2 +- .../models/hubspot_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/hubspot_source.py | 5 + 
.../datafactory/models/hubspot_source_py3.py | 9 +- .../models/impala_linked_service.py | 2 +- .../models/impala_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/impala_source.py | 5 + .../datafactory/models/impala_source_py3.py | 9 +- .../datafactory/models/jira_linked_service.py | 2 +- .../models/jira_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/jira_source.py | 5 + .../datafactory/models/jira_source_py3.py | 9 +- .../mgmt/datafactory/models/linked_service.py | 64 ++++---- .../datafactory/models/linked_service_py3.py | 64 ++++---- .../models/magento_linked_service.py | 2 +- .../models/magento_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/magento_source.py | 5 + .../datafactory/models/magento_source_py3.py | 9 +- .../models/maria_db_linked_service.py | 2 +- .../models/maria_db_linked_service_py3.py | 2 +- .../datafactory/models/maria_db_source.py | 5 + .../datafactory/models/maria_db_source_py3.py | 9 +- .../models/marketo_linked_service.py | 2 +- .../models/marketo_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/marketo_source.py | 5 + .../datafactory/models/marketo_source_py3.py | 9 +- .../mongo_db_cursor_methods_properties.py | 53 +++++++ .../mongo_db_cursor_methods_properties_py3.py | 53 +++++++ .../models/mongo_db_linked_service.py | 2 +- .../models/mongo_db_linked_service_py3.py | 2 +- .../datafactory/models/mongo_db_source.py | 5 + .../datafactory/models/mongo_db_source_py3.py | 9 +- .../models/mongo_db_v2_collection_dataset.py | 73 ++++++++++ .../mongo_db_v2_collection_dataset_py3.py | 73 ++++++++++ .../models/mongo_db_v2_linked_service.py | 66 +++++++++ .../models/mongo_db_v2_linked_service_py3.py | 66 +++++++++ .../datafactory/models/mongo_db_v2_source.py | 71 +++++++++ .../models/mongo_db_v2_source_py3.py | 71 +++++++++ .../models/multiple_pipeline_trigger.py | 4 + .../models/multiple_pipeline_trigger_py3.py | 8 +- .../models/my_sql_linked_service.py | 2 +- .../models/my_sql_linked_service_py3.py | 2 +- .../models/netezza_linked_service.py | 2 +- .../models/netezza_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/netezza_source.py | 5 + .../datafactory/models/netezza_source_py3.py | 9 +- .../models/odata_linked_service.py | 51 ++++++- .../models/odata_linked_service_py3.py | 53 ++++++- .../datafactory/models/odbc_linked_service.py | 2 +- .../models/odbc_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/odbc_sink.py | 5 + .../mgmt/datafactory/models/odbc_sink_py3.py | 9 +- .../datafactory/models/office365_dataset.py | 79 ++++++++++ .../models/office365_dataset_py3.py | 79 ++++++++++ .../models/office365_linked_service.py | 83 +++++++++++ .../models/office365_linked_service_py3.py | 83 +++++++++++ .../datafactory/models/office365_source.py | 52 +++++++ .../models/office365_source_py3.py | 52 +++++++ .../models/oracle_linked_service.py | 2 +- .../models/oracle_linked_service_py3.py | 2 +- .../oracle_service_cloud_linked_service.py | 95 ++++++++++++ ...oracle_service_cloud_linked_service_py3.py | 95 ++++++++++++ .../oracle_service_cloud_object_dataset.py | 72 +++++++++ ...oracle_service_cloud_object_dataset_py3.py | 72 +++++++++ .../models/oracle_service_cloud_source.py | 57 ++++++++ .../models/oracle_service_cloud_source_py3.py | 57 ++++++++ .../mgmt/datafactory/models/oracle_sink.py | 5 + .../datafactory/models/oracle_sink_py3.py | 9 +- .../mgmt/datafactory/models/oracle_source.py | 5 + .../datafactory/models/oracle_source_py3.py | 9 +- .../models/oracle_table_dataset.py | 5 +- .../models/oracle_table_dataset_py3.py | 7 +- 
.../models/paypal_linked_service.py | 2 +- .../models/paypal_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/paypal_source.py | 5 + .../datafactory/models/paypal_source_py3.py | 9 +- .../models/phoenix_linked_service.py | 2 +- .../models/phoenix_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/phoenix_source.py | 5 + .../datafactory/models/phoenix_source_py3.py | 9 +- .../mgmt/datafactory/models/pipeline_run.py | 12 ++ .../datafactory/models/pipeline_run_py3.py | 12 ++ .../models/postgre_sql_linked_service.py | 2 +- .../models/postgre_sql_linked_service_py3.py | 2 +- .../models/presto_linked_service.py | 2 +- .../models/presto_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/presto_source.py | 5 + .../datafactory/models/presto_source_py3.py | 9 +- .../models/quick_books_linked_service.py | 2 +- .../models/quick_books_linked_service_py3.py | 2 +- .../datafactory/models/quick_books_source.py | 5 + .../models/quick_books_source_py3.py | 9 +- .../datafactory/models/relational_source.py | 5 + .../models/relational_source_py3.py | 9 +- .../models/rerun_tumbling_window_trigger.py | 4 + .../rerun_tumbling_window_trigger_py3.py | 8 +- .../models/responsys_linked_service.py | 2 +- .../models/responsys_linked_service_py3.py | 2 +- .../datafactory/models/responsys_source.py | 5 + .../models/responsys_source_py3.py | 9 +- .../models/rest_resource_dataset.py | 93 ++++++++++++ .../models/rest_resource_dataset_py3.py | 93 ++++++++++++ .../models/rest_service_linked_service.py | 107 ++++++++++++++ .../models/rest_service_linked_service_py3.py | 107 ++++++++++++++ .../mgmt/datafactory/models/rest_source.py | 65 +++++++++ .../datafactory/models/rest_source_py3.py | 65 +++++++++ .../datafactory/models/run_query_filter.py | 2 +- .../models/run_query_filter_py3.py | 2 +- .../models/salesforce_linked_service.py | 2 +- .../models/salesforce_linked_service_py3.py | 2 +- ...lesforce_marketing_cloud_linked_service.py | 2 +- ...orce_marketing_cloud_linked_service_py3.py | 2 +- .../salesforce_marketing_cloud_source.py | 5 + .../salesforce_marketing_cloud_source_py3.py | 9 +- .../datafactory/models/salesforce_sink.py | 5 + .../datafactory/models/salesforce_sink_py3.py | 9 +- .../datafactory/models/salesforce_source.py | 5 + .../models/salesforce_source_py3.py | 9 +- .../models/sap_bw_linked_service.py | 2 +- .../models/sap_bw_linked_service_py3.py | 2 +- .../sap_cloud_for_customer_linked_service.py | 2 +- ...p_cloud_for_customer_linked_service_py3.py | 2 +- .../models/sap_cloud_for_customer_sink.py | 5 + .../models/sap_cloud_for_customer_sink_py3.py | 9 +- .../models/sap_cloud_for_customer_source.py | 5 + .../sap_cloud_for_customer_source_py3.py | 9 +- .../models/sap_ecc_linked_service.py | 2 +- .../models/sap_ecc_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/sap_ecc_source.py | 5 + .../datafactory/models/sap_ecc_source_py3.py | 9 +- .../models/sap_hana_linked_service.py | 2 +- .../models/sap_hana_linked_service_py3.py | 2 +- .../models/sap_open_hub_linked_service.py | 99 +++++++++++++ .../models/sap_open_hub_linked_service_py3.py | 99 +++++++++++++ .../datafactory/models/sap_open_hub_source.py | 53 +++++++ .../models/sap_open_hub_source_py3.py | 53 +++++++ .../models/sap_open_hub_table_dataset.py | 87 +++++++++++ .../models/sap_open_hub_table_dataset_py3.py | 87 +++++++++++ .../datafactory/models/schedule_trigger.py | 4 + .../models/schedule_trigger_py3.py | 8 +- .../models/service_now_linked_service.py | 2 +- .../models/service_now_linked_service_py3.py | 2 +- 
.../datafactory/models/service_now_source.py | 5 + .../models/service_now_source_py3.py | 9 +- .../models/sftp_server_linked_service.py | 2 +- .../models/sftp_server_linked_service_py3.py | 2 +- .../models/shopify_linked_service.py | 2 +- .../models/shopify_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/shopify_source.py | 5 + .../datafactory/models/shopify_source_py3.py | 9 +- .../models/spark_linked_service.py | 2 +- .../models/spark_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/spark_source.py | 5 + .../datafactory/models/spark_source_py3.py | 9 +- .../mgmt/datafactory/models/sql_dw_sink.py | 5 + .../datafactory/models/sql_dw_sink_py3.py | 9 +- .../mgmt/datafactory/models/sql_dw_source.py | 5 + .../datafactory/models/sql_dw_source_py3.py | 9 +- .../models/sql_server_linked_service.py | 2 +- .../models/sql_server_linked_service_py3.py | 2 +- .../azure/mgmt/datafactory/models/sql_sink.py | 5 + .../mgmt/datafactory/models/sql_sink_py3.py | 9 +- .../mgmt/datafactory/models/sql_source.py | 5 + .../mgmt/datafactory/models/sql_source_py3.py | 9 +- .../models/square_linked_service.py | 2 +- .../models/square_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/square_source.py | 5 + .../datafactory/models/square_source_py3.py | 9 +- .../datafactory/models/ssis_environment.py | 51 +++++++ .../models/ssis_environment_py3.py | 51 +++++++ .../models/ssis_environment_reference.py | 40 +++++ .../models/ssis_environment_reference_py3.py | 40 +++++ .../mgmt/datafactory/models/ssis_folder.py | 43 ++++++ .../datafactory/models/ssis_folder_py3.py | 43 ++++++ .../models/ssis_object_metadata.py | 7 + .../models/ssis_object_metadata_py3.py | 7 + .../mgmt/datafactory/models/ssis_package.py | 59 ++++++++ .../datafactory/models/ssis_package_py3.py | 59 ++++++++ .../mgmt/datafactory/models/ssis_parameter.py | 72 +++++++++ .../datafactory/models/ssis_parameter_py3.py | 72 +++++++++ .../mgmt/datafactory/models/ssis_project.py | 60 ++++++++ .../datafactory/models/ssis_project_py3.py | 60 ++++++++ .../mgmt/datafactory/models/ssis_variable.py | 52 +++++++ .../datafactory/models/ssis_variable_py3.py | 52 +++++++ .../models/sybase_linked_service.py | 2 +- .../models/sybase_linked_service_py3.py | 2 +- .../datafactory/models/tabular_translator.py | 6 + .../models/tabular_translator_py3.py | 8 +- .../models/teradata_linked_service.py | 2 +- .../models/teradata_linked_service_py3.py | 2 +- .../azure/mgmt/datafactory/models/trigger.py | 5 + .../mgmt/datafactory/models/trigger_py3.py | 7 +- .../models/tumbling_window_trigger.py | 4 + .../models/tumbling_window_trigger_py3.py | 8 +- .../datafactory/models/validation_activity.py | 81 +++++++++++ .../models/validation_activity_py3.py | 81 +++++++++++ .../models/vertica_linked_service.py | 2 +- .../models/vertica_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/vertica_source.py | 5 + .../datafactory/models/vertica_source_py3.py | 9 +- .../datafactory/models/web_hook_activity.py | 92 ++++++++++++ .../models/web_hook_activity_py3.py | 92 ++++++++++++ .../datafactory/models/web_linked_service.py | 2 +- .../models/web_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/web_source.py | 5 + .../mgmt/datafactory/models/web_source_py3.py | 9 +- .../datafactory/models/xero_linked_service.py | 2 +- .../models/xero_linked_service_py3.py | 2 +- .../mgmt/datafactory/models/xero_source.py | 5 + .../datafactory/models/xero_source_py3.py | 9 +- .../datafactory/models/zoho_linked_service.py | 2 +- .../models/zoho_linked_service_py3.py | 2 +- 
.../mgmt/datafactory/models/zoho_source.py | 5 + .../datafactory/models/zoho_source_py3.py | 9 +- .../operations/exposure_control_operations.py | 72 +++++++++ .../operations/pipelines_operations.py | 13 +- .../azure/mgmt/datafactory/version.py | 2 +- .../azure-mgmt-datafactory/setup.py | 1 + 415 files changed, 7979 insertions(+), 457 deletions(-) create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py create mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py create mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in 
b/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in index 6ceb27f7a96e..e4884efef41b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in +++ b/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in @@ -1,3 +1,4 @@ +recursive-include tests *.py *.yaml include *.rst include azure/__init__.py include azure/mgmt/__init__.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 46e9bf12bf1a..f8279c1a99bd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -95,6 +95,10 @@ from .schedule_trigger_py3 import ScheduleTrigger from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger from .azure_function_linked_service_py3 import AzureFunctionLinkedService + from .azure_data_explorer_linked_service_py3 import AzureDataExplorerLinkedService + from .google_ad_words_linked_service_py3 import GoogleAdWordsLinkedService + from .oracle_service_cloud_linked_service_py3 import OracleServiceCloudLinkedService + from .dynamics_ax_linked_service_py3 import DynamicsAXLinkedService from .responsys_linked_service_py3 import ResponsysLinkedService from .azure_databricks_linked_service_py3 import AzureDatabricksLinkedService from .azure_data_lake_analytics_linked_service_py3 import AzureDataLakeAnalyticsLinkedService @@ -138,10 +142,16 @@ from .custom_data_source_linked_service_py3 import CustomDataSourceLinkedService from .amazon_redshift_linked_service_py3 import AmazonRedshiftLinkedService from .amazon_s3_linked_service_py3 import AmazonS3LinkedService + from .rest_service_linked_service_py3 import RestServiceLinkedService + from .sap_open_hub_linked_service_py3 import SapOpenHubLinkedService from .sap_ecc_linked_service_py3 import SapEccLinkedService from .sap_cloud_for_customer_linked_service_py3 import SapCloudForCustomerLinkedService from .salesforce_linked_service_py3 import SalesforceLinkedService + from .office365_linked_service_py3 import Office365LinkedService + from .azure_blob_fs_linked_service_py3 import AzureBlobFSLinkedService from .azure_data_lake_store_linked_service_py3 import AzureDataLakeStoreLinkedService + from .cosmos_db_mongo_db_api_linked_service_py3 import CosmosDbMongoDbApiLinkedService + from .mongo_db_v2_linked_service_py3 import MongoDbV2LinkedService from .mongo_db_linked_service_py3 import MongoDbLinkedService from .cassandra_linked_service_py3 import CassandraLinkedService from .web_client_certificate_authentication_py3 import WebClientCertificateAuthentication @@ -172,6 +182,10 @@ from .azure_table_storage_linked_service_py3 import AzureTableStorageLinkedService from .azure_blob_storage_linked_service_py3 import AzureBlobStorageLinkedService from .azure_storage_linked_service_py3 import AzureStorageLinkedService + from .google_ad_words_object_dataset_py3 import GoogleAdWordsObjectDataset + from .azure_data_explorer_table_dataset_py3 import AzureDataExplorerTableDataset + from .oracle_service_cloud_object_dataset_py3 import OracleServiceCloudObjectDataset + from .dynamics_ax_resource_dataset_py3 import DynamicsAXResourceDataset from .responsys_object_dataset_py3 import ResponsysObjectDataset from .salesforce_marketing_cloud_object_dataset_py3 import SalesforceMarketingCloudObjectDataset from .vertica_table_dataset_py3 import VerticaTableDataset @@ -216,7 +230,9 @@ from .http_dataset_py3 import HttpDataset 
from .azure_search_index_dataset_py3 import AzureSearchIndexDataset from .web_table_dataset_py3 import WebTableDataset + from .rest_resource_dataset_py3 import RestResourceDataset from .sql_server_table_dataset_py3 import SqlServerTableDataset + from .sap_open_hub_table_dataset_py3 import SapOpenHubTableDataset from .sap_ecc_resource_dataset_py3 import SapEccResourceDataset from .sap_cloud_for_customer_resource_dataset_py3 import SapCloudForCustomerResourceDataset from .salesforce_object_dataset_py3 import SalesforceObjectDataset @@ -224,8 +240,12 @@ from .azure_my_sql_table_dataset_py3 import AzureMySqlTableDataset from .oracle_table_dataset_py3 import OracleTableDataset from .odata_resource_dataset_py3 import ODataResourceDataset + from .cosmos_db_mongo_db_api_collection_dataset_py3 import CosmosDbMongoDbApiCollectionDataset + from .mongo_db_v2_collection_dataset_py3 import MongoDbV2CollectionDataset from .mongo_db_collection_dataset_py3 import MongoDbCollectionDataset from .file_share_dataset_py3 import FileShareDataset + from .office365_dataset_py3 import Office365Dataset + from .azure_blob_fs_dataset_py3 import AzureBlobFSDataset from .azure_data_lake_store_dataset_py3 import AzureDataLakeStoreDataset from .dynamics_entity_dataset_py3 import DynamicsEntityDataset from .document_db_collection_dataset_py3 import DocumentDbCollectionDataset @@ -250,6 +270,9 @@ from .web_activity_py3 import WebActivity from .redshift_unload_settings_py3 import RedshiftUnloadSettings from .amazon_redshift_source_py3 import AmazonRedshiftSource + from .google_ad_words_source_py3 import GoogleAdWordsSource + from .oracle_service_cloud_source_py3 import OracleServiceCloudSource + from .dynamics_ax_source_py3 import DynamicsAXSource from .responsys_source_py3 import ResponsysSource from .salesforce_marketing_cloud_source_py3 import SalesforceMarketingCloudSource from .vertica_source_py3 import VerticaSource @@ -281,11 +304,17 @@ from .azure_postgre_sql_source_py3 import AzurePostgreSqlSource from .amazon_mws_source_py3 import AmazonMWSSource from .http_source_py3 import HttpSource + from .azure_blob_fs_source_py3 import AzureBlobFSSource from .azure_data_lake_store_source_py3 import AzureDataLakeStoreSource + from .office365_source_py3 import Office365Source + from .mongo_db_cursor_methods_properties_py3 import MongoDbCursorMethodsProperties + from .cosmos_db_mongo_db_api_source_py3 import CosmosDbMongoDbApiSource + from .mongo_db_v2_source_py3 import MongoDbV2Source from .mongo_db_source_py3 import MongoDbSource from .cassandra_source_py3 import CassandraSource from .web_source_py3 import WebSource from .oracle_source_py3 import OracleSource + from .azure_data_explorer_source_py3 import AzureDataExplorerSource from .azure_my_sql_source_py3 import AzureMySqlSource from .distcp_settings_py3 import DistcpSettings from .hdfs_source_py3 import HdfsSource @@ -293,6 +322,8 @@ from .sql_dw_source_py3 import SqlDWSource from .stored_procedure_parameter_py3 import StoredProcedureParameter from .sql_source_py3 import SqlSource + from .rest_source_py3 import RestSource + from .sap_open_hub_source_py3 import SapOpenHubSource from .sap_ecc_source_py3 import SapEccSource from .sap_cloud_for_customer_source_py3 import SapCloudForCustomerSource from .salesforce_source_py3 import SalesforceSource @@ -322,10 +353,13 @@ from .staging_settings_py3 import StagingSettings from .tabular_translator_py3 import TabularTranslator from .copy_translator_py3 import CopyTranslator + from .cosmos_db_mongo_db_api_sink_py3 import 
CosmosDbMongoDbApiSink from .salesforce_sink_py3 import SalesforceSink + from .azure_data_explorer_sink_py3 import AzureDataExplorerSink from .dynamics_sink_py3 import DynamicsSink from .odbc_sink_py3 import OdbcSink from .azure_search_index_sink_py3 import AzureSearchIndexSink + from .azure_blob_fs_sink_py3 import AzureBlobFSSink from .azure_data_lake_store_sink_py3 import AzureDataLakeStoreSink from .oracle_sink_py3 import OracleSink from .polybase_settings_py3 import PolybaseSettings @@ -340,9 +374,11 @@ from .copy_sink_py3 import CopySink from .copy_activity_py3 import CopyActivity from .execution_activity_py3 import ExecutionActivity + from .web_hook_activity_py3 import WebHookActivity from .append_variable_activity_py3 import AppendVariableActivity from .set_variable_activity_py3 import SetVariableActivity from .filter_activity_py3 import FilterActivity + from .validation_activity_py3 import ValidationActivity from .until_activity_py3 import UntilActivity from .wait_activity_py3 import WaitActivity from .for_each_activity_py3 import ForEachActivity @@ -367,6 +403,13 @@ from .integration_runtime_compute_properties_py3 import IntegrationRuntimeComputeProperties from .managed_integration_runtime_py3 import ManagedIntegrationRuntime from .integration_runtime_node_ip_address_py3 import IntegrationRuntimeNodeIpAddress + from .ssis_variable_py3 import SsisVariable + from .ssis_environment_py3 import SsisEnvironment + from .ssis_parameter_py3 import SsisParameter + from .ssis_package_py3 import SsisPackage + from .ssis_environment_reference_py3 import SsisEnvironmentReference + from .ssis_project_py3 import SsisProject + from .ssis_folder_py3 import SsisFolder from .ssis_object_metadata_py3 import SsisObjectMetadata from .ssis_object_metadata_list_response_py3 import SsisObjectMetadataListResponse from .integration_runtime_node_monitoring_data_py3 import IntegrationRuntimeNodeMonitoringData @@ -460,6 +503,10 @@ from .schedule_trigger import ScheduleTrigger from .multiple_pipeline_trigger import MultiplePipelineTrigger from .azure_function_linked_service import AzureFunctionLinkedService + from .azure_data_explorer_linked_service import AzureDataExplorerLinkedService + from .google_ad_words_linked_service import GoogleAdWordsLinkedService + from .oracle_service_cloud_linked_service import OracleServiceCloudLinkedService + from .dynamics_ax_linked_service import DynamicsAXLinkedService from .responsys_linked_service import ResponsysLinkedService from .azure_databricks_linked_service import AzureDatabricksLinkedService from .azure_data_lake_analytics_linked_service import AzureDataLakeAnalyticsLinkedService @@ -503,10 +550,16 @@ from .custom_data_source_linked_service import CustomDataSourceLinkedService from .amazon_redshift_linked_service import AmazonRedshiftLinkedService from .amazon_s3_linked_service import AmazonS3LinkedService + from .rest_service_linked_service import RestServiceLinkedService + from .sap_open_hub_linked_service import SapOpenHubLinkedService from .sap_ecc_linked_service import SapEccLinkedService from .sap_cloud_for_customer_linked_service import SapCloudForCustomerLinkedService from .salesforce_linked_service import SalesforceLinkedService + from .office365_linked_service import Office365LinkedService + from .azure_blob_fs_linked_service import AzureBlobFSLinkedService from .azure_data_lake_store_linked_service import AzureDataLakeStoreLinkedService + from .cosmos_db_mongo_db_api_linked_service import CosmosDbMongoDbApiLinkedService + from .mongo_db_v2_linked_service 
import MongoDbV2LinkedService from .mongo_db_linked_service import MongoDbLinkedService from .cassandra_linked_service import CassandraLinkedService from .web_client_certificate_authentication import WebClientCertificateAuthentication @@ -537,6 +590,10 @@ from .azure_table_storage_linked_service import AzureTableStorageLinkedService from .azure_blob_storage_linked_service import AzureBlobStorageLinkedService from .azure_storage_linked_service import AzureStorageLinkedService + from .google_ad_words_object_dataset import GoogleAdWordsObjectDataset + from .azure_data_explorer_table_dataset import AzureDataExplorerTableDataset + from .oracle_service_cloud_object_dataset import OracleServiceCloudObjectDataset + from .dynamics_ax_resource_dataset import DynamicsAXResourceDataset from .responsys_object_dataset import ResponsysObjectDataset from .salesforce_marketing_cloud_object_dataset import SalesforceMarketingCloudObjectDataset from .vertica_table_dataset import VerticaTableDataset @@ -581,7 +638,9 @@ from .http_dataset import HttpDataset from .azure_search_index_dataset import AzureSearchIndexDataset from .web_table_dataset import WebTableDataset + from .rest_resource_dataset import RestResourceDataset from .sql_server_table_dataset import SqlServerTableDataset + from .sap_open_hub_table_dataset import SapOpenHubTableDataset from .sap_ecc_resource_dataset import SapEccResourceDataset from .sap_cloud_for_customer_resource_dataset import SapCloudForCustomerResourceDataset from .salesforce_object_dataset import SalesforceObjectDataset @@ -589,8 +648,12 @@ from .azure_my_sql_table_dataset import AzureMySqlTableDataset from .oracle_table_dataset import OracleTableDataset from .odata_resource_dataset import ODataResourceDataset + from .cosmos_db_mongo_db_api_collection_dataset import CosmosDbMongoDbApiCollectionDataset + from .mongo_db_v2_collection_dataset import MongoDbV2CollectionDataset from .mongo_db_collection_dataset import MongoDbCollectionDataset from .file_share_dataset import FileShareDataset + from .office365_dataset import Office365Dataset + from .azure_blob_fs_dataset import AzureBlobFSDataset from .azure_data_lake_store_dataset import AzureDataLakeStoreDataset from .dynamics_entity_dataset import DynamicsEntityDataset from .document_db_collection_dataset import DocumentDbCollectionDataset @@ -615,6 +678,9 @@ from .web_activity import WebActivity from .redshift_unload_settings import RedshiftUnloadSettings from .amazon_redshift_source import AmazonRedshiftSource + from .google_ad_words_source import GoogleAdWordsSource + from .oracle_service_cloud_source import OracleServiceCloudSource + from .dynamics_ax_source import DynamicsAXSource from .responsys_source import ResponsysSource from .salesforce_marketing_cloud_source import SalesforceMarketingCloudSource from .vertica_source import VerticaSource @@ -646,11 +712,17 @@ from .azure_postgre_sql_source import AzurePostgreSqlSource from .amazon_mws_source import AmazonMWSSource from .http_source import HttpSource + from .azure_blob_fs_source import AzureBlobFSSource from .azure_data_lake_store_source import AzureDataLakeStoreSource + from .office365_source import Office365Source + from .mongo_db_cursor_methods_properties import MongoDbCursorMethodsProperties + from .cosmos_db_mongo_db_api_source import CosmosDbMongoDbApiSource + from .mongo_db_v2_source import MongoDbV2Source from .mongo_db_source import MongoDbSource from .cassandra_source import CassandraSource from .web_source import WebSource from .oracle_source import OracleSource 
+ from .azure_data_explorer_source import AzureDataExplorerSource from .azure_my_sql_source import AzureMySqlSource from .distcp_settings import DistcpSettings from .hdfs_source import HdfsSource @@ -658,6 +730,8 @@ from .sql_dw_source import SqlDWSource from .stored_procedure_parameter import StoredProcedureParameter from .sql_source import SqlSource + from .rest_source import RestSource + from .sap_open_hub_source import SapOpenHubSource from .sap_ecc_source import SapEccSource from .sap_cloud_for_customer_source import SapCloudForCustomerSource from .salesforce_source import SalesforceSource @@ -687,10 +761,13 @@ from .staging_settings import StagingSettings from .tabular_translator import TabularTranslator from .copy_translator import CopyTranslator + from .cosmos_db_mongo_db_api_sink import CosmosDbMongoDbApiSink from .salesforce_sink import SalesforceSink + from .azure_data_explorer_sink import AzureDataExplorerSink from .dynamics_sink import DynamicsSink from .odbc_sink import OdbcSink from .azure_search_index_sink import AzureSearchIndexSink + from .azure_blob_fs_sink import AzureBlobFSSink from .azure_data_lake_store_sink import AzureDataLakeStoreSink from .oracle_sink import OracleSink from .polybase_settings import PolybaseSettings @@ -705,9 +782,11 @@ from .copy_sink import CopySink from .copy_activity import CopyActivity from .execution_activity import ExecutionActivity + from .web_hook_activity import WebHookActivity from .append_variable_activity import AppendVariableActivity from .set_variable_activity import SetVariableActivity from .filter_activity import FilterActivity + from .validation_activity import ValidationActivity from .until_activity import UntilActivity from .wait_activity import WaitActivity from .for_each_activity import ForEachActivity @@ -732,6 +811,13 @@ from .integration_runtime_compute_properties import IntegrationRuntimeComputeProperties from .managed_integration_runtime import ManagedIntegrationRuntime from .integration_runtime_node_ip_address import IntegrationRuntimeNodeIpAddress + from .ssis_variable import SsisVariable + from .ssis_environment import SsisEnvironment + from .ssis_parameter import SsisParameter + from .ssis_package import SsisPackage + from .ssis_environment_reference import SsisEnvironmentReference + from .ssis_project import SsisProject + from .ssis_folder import SsisFolder from .ssis_object_metadata import SsisObjectMetadata from .ssis_object_metadata_list_response import SsisObjectMetadataListResponse from .integration_runtime_node_monitoring_data import IntegrationRuntimeNodeMonitoringData @@ -764,6 +850,7 @@ DayOfWeek, DaysOfWeek, RecurrenceFrequency, + GoogleAdWordsAuthenticationType, SparkServerType, SparkThriftTransportProtocol, SparkAuthenticationType, @@ -780,8 +867,10 @@ SftpAuthenticationType, FtpAuthenticationType, HttpAuthenticationType, + RestServiceAuthenticationType, MongoDbAuthenticationType, ODataAuthenticationType, + ODataAadServicePrincipalCredentialType, TeradataAuthenticationType, Db2AuthenticationType, SybaseAuthenticationType, @@ -798,6 +887,7 @@ CopyBehaviorType, PolybaseSettingsRejectType, SapCloudForCustomerSinkWriteBehavior, + WebHookActivityMethod, IntegrationRuntimeType, SelfHostedIntegrationRuntimeNodeStatus, IntegrationRuntimeUpdateResult, @@ -896,6 +986,10 @@ 'ScheduleTrigger', 'MultiplePipelineTrigger', 'AzureFunctionLinkedService', + 'AzureDataExplorerLinkedService', + 'GoogleAdWordsLinkedService', + 'OracleServiceCloudLinkedService', + 'DynamicsAXLinkedService', 'ResponsysLinkedService', 
'AzureDatabricksLinkedService', 'AzureDataLakeAnalyticsLinkedService', @@ -939,10 +1033,16 @@ 'CustomDataSourceLinkedService', 'AmazonRedshiftLinkedService', 'AmazonS3LinkedService', + 'RestServiceLinkedService', + 'SapOpenHubLinkedService', 'SapEccLinkedService', 'SapCloudForCustomerLinkedService', 'SalesforceLinkedService', + 'Office365LinkedService', + 'AzureBlobFSLinkedService', 'AzureDataLakeStoreLinkedService', + 'CosmosDbMongoDbApiLinkedService', + 'MongoDbV2LinkedService', 'MongoDbLinkedService', 'CassandraLinkedService', 'WebClientCertificateAuthentication', @@ -973,6 +1073,10 @@ 'AzureTableStorageLinkedService', 'AzureBlobStorageLinkedService', 'AzureStorageLinkedService', + 'GoogleAdWordsObjectDataset', + 'AzureDataExplorerTableDataset', + 'OracleServiceCloudObjectDataset', + 'DynamicsAXResourceDataset', 'ResponsysObjectDataset', 'SalesforceMarketingCloudObjectDataset', 'VerticaTableDataset', @@ -1017,7 +1121,9 @@ 'HttpDataset', 'AzureSearchIndexDataset', 'WebTableDataset', + 'RestResourceDataset', 'SqlServerTableDataset', + 'SapOpenHubTableDataset', 'SapEccResourceDataset', 'SapCloudForCustomerResourceDataset', 'SalesforceObjectDataset', @@ -1025,8 +1131,12 @@ 'AzureMySqlTableDataset', 'OracleTableDataset', 'ODataResourceDataset', + 'CosmosDbMongoDbApiCollectionDataset', + 'MongoDbV2CollectionDataset', 'MongoDbCollectionDataset', 'FileShareDataset', + 'Office365Dataset', + 'AzureBlobFSDataset', 'AzureDataLakeStoreDataset', 'DynamicsEntityDataset', 'DocumentDbCollectionDataset', @@ -1051,6 +1161,9 @@ 'WebActivity', 'RedshiftUnloadSettings', 'AmazonRedshiftSource', + 'GoogleAdWordsSource', + 'OracleServiceCloudSource', + 'DynamicsAXSource', 'ResponsysSource', 'SalesforceMarketingCloudSource', 'VerticaSource', @@ -1082,11 +1195,17 @@ 'AzurePostgreSqlSource', 'AmazonMWSSource', 'HttpSource', + 'AzureBlobFSSource', 'AzureDataLakeStoreSource', + 'Office365Source', + 'MongoDbCursorMethodsProperties', + 'CosmosDbMongoDbApiSource', + 'MongoDbV2Source', 'MongoDbSource', 'CassandraSource', 'WebSource', 'OracleSource', + 'AzureDataExplorerSource', 'AzureMySqlSource', 'DistcpSettings', 'HdfsSource', @@ -1094,6 +1213,8 @@ 'SqlDWSource', 'StoredProcedureParameter', 'SqlSource', + 'RestSource', + 'SapOpenHubSource', 'SapEccSource', 'SapCloudForCustomerSource', 'SalesforceSource', @@ -1123,10 +1244,13 @@ 'StagingSettings', 'TabularTranslator', 'CopyTranslator', + 'CosmosDbMongoDbApiSink', 'SalesforceSink', + 'AzureDataExplorerSink', 'DynamicsSink', 'OdbcSink', 'AzureSearchIndexSink', + 'AzureBlobFSSink', 'AzureDataLakeStoreSink', 'OracleSink', 'PolybaseSettings', @@ -1141,9 +1265,11 @@ 'CopySink', 'CopyActivity', 'ExecutionActivity', + 'WebHookActivity', 'AppendVariableActivity', 'SetVariableActivity', 'FilterActivity', + 'ValidationActivity', 'UntilActivity', 'WaitActivity', 'ForEachActivity', @@ -1168,6 +1294,13 @@ 'IntegrationRuntimeComputeProperties', 'ManagedIntegrationRuntime', 'IntegrationRuntimeNodeIpAddress', + 'SsisVariable', + 'SsisEnvironment', + 'SsisParameter', + 'SsisPackage', + 'SsisEnvironmentReference', + 'SsisProject', + 'SsisFolder', 'SsisObjectMetadata', 'SsisObjectMetadataListResponse', 'IntegrationRuntimeNodeMonitoringData', @@ -1199,6 +1332,7 @@ 'DayOfWeek', 'DaysOfWeek', 'RecurrenceFrequency', + 'GoogleAdWordsAuthenticationType', 'SparkServerType', 'SparkThriftTransportProtocol', 'SparkAuthenticationType', @@ -1215,8 +1349,10 @@ 'SftpAuthenticationType', 'FtpAuthenticationType', 'HttpAuthenticationType', + 'RestServiceAuthenticationType', 'MongoDbAuthenticationType', 
'ODataAuthenticationType', + 'ODataAadServicePrincipalCredentialType', 'TeradataAuthenticationType', 'Db2AuthenticationType', 'SybaseAuthenticationType', @@ -1233,6 +1369,7 @@ 'CopyBehaviorType', 'PolybaseSettingsRejectType', 'SapCloudForCustomerSinkWriteBehavior', + 'WebHookActivityMethod', 'IntegrationRuntimeType', 'SelfHostedIntegrationRuntimeNodeStatus', 'IntegrationRuntimeUpdateResult', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py index 4531b28777c6..b1e5ed533bba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py @@ -29,7 +29,7 @@ class AmazonMWSLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py index 421c20dc2d4a..a8db63933154 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py @@ -29,7 +29,7 @@ class AmazonMWSLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py index 1cabba2201c7..f9d034e610d4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py @@ -27,6 +27,10 @@ class AmazonMWSSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,6 +46,7 @@ class AmazonMWSSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py index 895281f9af51..9ef7f5b30244 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py @@ -27,6 +27,10 @@ class AmazonMWSSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class AmazonMWSSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'AmazonMWSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py index a85e73b458ae..4272b28c13f5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py @@ -29,7 +29,7 @@ class AmazonRedshiftLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py index 7912ad040946..3b84583c6c86 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py @@ -29,7 +29,7 @@ class AmazonRedshiftLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py index 0fa9a82ff9db..d4fdfa4aa2ba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py @@ -27,6 +27,10 @@ class AmazonRedshiftSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType @@ -48,6 +52,7 @@ class AmazonRedshiftSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py index 9542e56e4850..9b34b2ef5b97 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py @@ -27,6 +27,10 @@ class AmazonRedshiftSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. 
Type: string (or Expression with resultType @@ -48,13 +52,14 @@ class AmazonRedshiftSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, redshift_unload_settings=None, **kwargs) -> None: - super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, redshift_unload_settings=None, **kwargs) -> None: + super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.redshift_unload_settings = redshift_unload_settings self.type = 'AmazonRedshiftSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py index d6262a013b0d..e91a5ba26131 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py @@ -55,6 +55,12 @@ class AmazonS3Dataset(Dataset): :param version: The version for the S3 object. Type: string (or Expression with resultType string). :type version: object + :param modified_datetime_start: The start of S3 object's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of S3 object's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object :param format: The format of files. 
:type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the Amazon S3 @@ -82,6 +88,8 @@ class AmazonS3Dataset(Dataset): 'key': {'key': 'typeProperties.key', 'type': 'object'}, 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } @@ -92,6 +100,8 @@ def __init__(self, **kwargs): self.key = kwargs.get('key', None) self.prefix = kwargs.get('prefix', None) self.version = kwargs.get('version', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.format = kwargs.get('format', None) self.compression = kwargs.get('compression', None) self.type = 'AmazonS3Object' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py index 3936e9646a09..d84ae48b2a46 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py @@ -55,6 +55,12 @@ class AmazonS3Dataset(Dataset): :param version: The version for the S3 object. Type: string (or Expression with resultType string). :type version: object + :param modified_datetime_start: The start of S3 object's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of S3 object's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object :param format: The format of files. 
:type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the Amazon S3 @@ -82,16 +88,20 @@ class AmazonS3Dataset(Dataset): 'key': {'key': 'typeProperties.key', 'type': 'object'}, 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, *, linked_service_name, bucket_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, format=None, compression=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, bucket_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.bucket_name = bucket_name self.key = key self.prefix = prefix self.version = version + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end self.format = format self.compression = compression self.type = 'AmazonS3Object' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py index c9ff7261d915..250518c1a7ec 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py @@ -29,7 +29,7 @@ class AmazonS3LinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -40,6 +40,11 @@ class AmazonS3LinkedService(LinkedService): :param secret_access_key: The secret access key of the Amazon S3 Identity and Access Management (IAM) user. :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + S3 Connector. This is an optional property; change it only if you want to + try a different service endpoint or want to switch between https and http. + Type: string (or Expression with resultType string). + :type service_url: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
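Assuming the new modifiedDatetimeStart/modifiedDatetimeEnd properties behave as documented above, an S3 dataset can be narrowed to objects last modified inside a window. A sketch with hypothetical linked service, bucket, and key names:

    from azure.mgmt.datafactory.models import AmazonS3Dataset, LinkedServiceReference

    # Only pick up S3 objects modified during May 2019.
    s3_dataset = AmazonS3Dataset(
        linked_service_name=LinkedServiceReference(reference_name="AmazonS3LinkedService"),
        bucket_name="raw-exports",
        key="orders/2019/05/",
        modified_datetime_start="2019-05-01T00:00:00Z",
        modified_datetime_end="2019-06-01T00:00:00Z",
    )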
@@ -59,6 +64,7 @@ class AmazonS3LinkedService(LinkedService): 'type': {'key': 'type', 'type': 'str'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -66,5 +72,6 @@ def __init__(self, **kwargs): super(AmazonS3LinkedService, self).__init__(**kwargs) self.access_key_id = kwargs.get('access_key_id', None) self.secret_access_key = kwargs.get('secret_access_key', None) + self.service_url = kwargs.get('service_url', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AmazonS3' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py index 044e8bc299cf..8d136bb71fc0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py @@ -29,7 +29,7 @@ class AmazonS3LinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -40,6 +40,11 @@ class AmazonS3LinkedService(LinkedService): :param secret_access_key: The secret access key of the Amazon S3 Identity and Access Management (IAM) user. :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + S3 Connector. This is an optional property; change it only if you want to + try a different service endpoint or want to switch between https and http. + Type: string (or Expression with resultType string). + :type service_url: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -59,12 +64,14 @@ class AmazonS3LinkedService(LinkedService): 'type': {'key': 'type', 'type': 'str'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, encrypted_credential=None, **kwargs) -> None: super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.access_key_id = access_key_id self.secret_access_key = secret_access_key + self.service_url = service_url self.encrypted_credential = encrypted_credential self.type = 'AmazonS3' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py index 2fcf33e8d0c8..986023308e23 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py @@ -29,7 +29,7 @@ class AzureBatchLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py index 63724f76f13f..e7d33dfb342a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureBatchLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py index c3f4ffc118ba..01814cf8f9a9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py @@ -52,6 +52,12 @@ class AzureBlobDataset(Dataset): :param file_name: The name of the Azure Blob. Type: string (or Expression with resultType string). 
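The new serviceUrl property lets the S3 connector target an alternative or S3-compatible endpoint instead of the default one. A hedged sketch; the endpoint and credentials below are placeholders:

    from azure.mgmt.datafactory.models import AmazonS3LinkedService, SecureString

    s3_service = AmazonS3LinkedService(
        access_key_id="<access-key-id>",
        secret_access_key=SecureString(value="<secret-access-key>"),
        # Override only when a non-default service endpoint (or http) is needed.
        service_url="https://s3.eu-west-1.amazonaws.com",
    )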
:type file_name: object + :param modified_datetime_start: The start of Azure Blob's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of Azure Blob's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object :param format: The format of the Azure Blob storage. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the blob storage. @@ -76,6 +82,8 @@ class AzureBlobDataset(Dataset): 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } @@ -85,6 +93,8 @@ def __init__(self, **kwargs): self.folder_path = kwargs.get('folder_path', None) self.table_root_location = kwargs.get('table_root_location', None) self.file_name = kwargs.get('file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.format = kwargs.get('format', None) self.compression = kwargs.get('compression', None) self.type = 'AzureBlob' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py index 7567e1fba9fb..706c39deb289 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py @@ -52,6 +52,12 @@ class AzureBlobDataset(Dataset): :param file_name: The name of the Azure Blob. Type: string (or Expression with resultType string). :type file_name: object + :param modified_datetime_start: The start of Azure Blob's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of Azure Blob's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object :param format: The format of the Azure Blob storage. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the blob storage. 
@@ -76,15 +82,19 @@ class AzureBlobDataset(Dataset): 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, table_root_location=None, file_name=None, format=None, compression=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, table_root_location=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.folder_path = folder_path self.table_root_location = table_root_location self.file_name = file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end self.format = format self.compression = compression self.type = 'AzureBlob' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py new file mode 100644 index 000000000000..0ef62ff7122f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureBlobFSDataset(Dataset): + """The Azure Data Lake Storage Gen2 storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
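AzureBlobDataset gains the same modified-datetime window, and the attribute map above nests both values under typeProperties when the model is serialized. A sketch with hypothetical names, assuming msrest's standard Model.serialize() is used to inspect the wire shape:

    from azure.mgmt.datafactory.models import AzureBlobDataset, LinkedServiceReference

    blob_dataset = AzureBlobDataset(
        linked_service_name=LinkedServiceReference(reference_name="AzureBlobStorageLinkedService"),
        folder_path="landing/sales",
        modified_datetime_start="2019-05-01T00:00:00Z",
        modified_datetime_end="2019-05-02T00:00:00Z",
    )
    # The new fields serialize as typeProperties.modifiedDatetimeStart/End.
    wire_format = blob_dataset.serialize()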
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. + Type: string (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the Azure Data Lake Storage Gen2. Type: + string (or Expression with resultType string). + :type file_name: object + :param format: The format of the Azure Data Lake Storage Gen2 storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSDataset, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + self.type = 'AzureBlobFSFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py new file mode 100644 index 000000000000..82136a683fd3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureBlobFSDataset(Dataset): + """The Azure Data Lake Storage Gen2 storage. + + All required parameters must be populated in order to send to Azure. 
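Once the AzureBlobFS* models land, an ADLS Gen2 file can be described much like a blob dataset. A minimal sketch assuming an AzureBlobFS linked service already exists in the factory; its name and the paths are hypothetical:

    from azure.mgmt.datafactory.models import AzureBlobFSDataset, LinkedServiceReference

    adls_gen2_dataset = AzureBlobFSDataset(
        linked_service_name=LinkedServiceReference(reference_name="AdlsGen2LinkedService"),
        folder_path="filesystem1/curated/orders",
        file_name="orders.csv",
    )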
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. + Type: string (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the Azure Data Lake Storage Gen2. Type: + string (or Expression with resultType string). + :type file_name: object + :param format: The format of the Azure Data Lake Storage Gen2 storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: + super(AzureBlobFSDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.folder_path = folder_path + self.file_name = file_name + self.format = format + self.compression = compression + self.type = 'AzureBlobFSFile' diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py new file mode 100644 index 000000000000..262ce976227b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureBlobFSLinkedService(LinkedService): + """Azure Data Lake Storage Gen2 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). + :type url: object + :param account_key: Account key for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). + :type account_key: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. Type: + string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.account_key = kwargs.get('account_key', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureBlobFS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py new file mode 100644 index 000000000000..f0d555078bf7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureBlobFSLinkedService(LinkedService): + """Azure Data Lake Storage Gen2 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). + :type url: object + :param account_key: Account key for the Azure Data Lake Storage Gen2 + service. 
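A sketch of the new ADLS Gen2 linked service using service-principal authentication rather than an account key; every identifier shown is a placeholder:

    from azure.mgmt.datafactory.models import AzureBlobFSLinkedService, SecureString

    adls_gen2_service = AzureBlobFSLinkedService(
        url="https://<account>.dfs.core.windows.net",
        service_principal_id="<application-id>",
        service_principal_key=SecureString(value="<application-key>"),
        tenant="<tenant-id>",
    )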
Type: string (or Expression with resultType string). + :type account_key: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. Type: + string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, account_key=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.account_key = account_key + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureBlobFS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py new file mode 100644 index 000000000000..59e070c64fe8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureBlobFSSink(CopySink): + """A copy activity Azure Data Lake Storage Gen2 sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. Possible + values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' + :type copy_behavior: str or + ~azure.mgmt.datafactory.models.CopyBehaviorType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSSink, self).__init__(**kwargs) + self.copy_behavior = kwargs.get('copy_behavior', None) + self.type = 'AzureBlobFSSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py new file mode 100644 index 000000000000..35ad6a97dbfe --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureBlobFSSink(CopySink): + """A copy activity Azure Data Lake Storage Gen2 sink. + + All required parameters must be populated in order to send to Azure. 
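The new sink mirrors the other file-based sinks, and copyBehavior accepts the existing CopyBehaviorType values. A sketch with hypothetical settings:

    from azure.mgmt.datafactory.models import AzureBlobFSSink

    adls_gen2_sink = AzureBlobFSSink(
        copy_behavior="PreserveHierarchy",
        max_concurrent_connections=8,
        write_batch_timeout="00:05:00",
    )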
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. Possible + values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' + :type copy_behavior: str or + ~azure.mgmt.datafactory.models.CopyBehaviorType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.copy_behavior = copy_behavior + self.type = 'AzureBlobFSSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py new file mode 100644 index 000000000000..0252ffd5ba8f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureBlobFSSource(CopySource): + """A copy activity Azure BlobFS source. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSSource, self).__init__(**kwargs) + self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) + self.skip_header_line_count = kwargs.get('skip_header_line_count', None) + self.recursive = kwargs.get('recursive', None) + self.type = 'AzureBlobFSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py new file mode 100644 index 000000000000..5b512c1f334f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureBlobFSSource(CopySource): + """A copy activity Azure BlobFS source. + + All required parameters must be populated in order to send to Azure. 
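And the matching source, which carries the same recursion and header-skipping switches as the blob source. A sketch with hypothetical values:

    from azure.mgmt.datafactory.models import AzureBlobFSSource

    adls_gen2_source = AzureBlobFSSource(
        recursive=True,
        skip_header_line_count=1,
        max_concurrent_connections=4,
    )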
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: + super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.treat_empty_as_null = treat_empty_as_null + self.skip_header_line_count = skip_header_line_count + self.recursive = recursive + self.type = 'AzureBlobFSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py index e4466c4ce9c9..5246e02ab9b4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py @@ -29,7 +29,7 @@ class AzureBlobStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py index 4587e0c95dad..ba0a511532b4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureBlobStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py new file mode 100644 index 000000000000..5e5a9f7560c6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureDataExplorerLinkedService(LinkedService): + """Azure Data Explorer (Kusto) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of Azure Data Explorer (the + engine's endpoint). URL will be in the format + https://..kusto.windows.net. Type: string (or + Expression with resultType string) + :type endpoint: object + :param service_principal_id: Required. The ID of the service principal + used to authenticate against Azure Data Explorer. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The key of the service principal + used to authenticate against Kusto. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param tenant: Required. 
The name or ID of the tenant to which the service + principal belongs. Type: string (or Expression with resultType string). + :type tenant: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'database': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.database = kwargs.get('database', None) + self.tenant = kwargs.get('tenant', None) + self.type = 'AzureDataExplorer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py new file mode 100644 index 000000000000..3cd8ab9c3c19 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureDataExplorerLinkedService(LinkedService): + """Azure Data Explorer (Kusto) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of Azure Data Explorer (the + engine's endpoint). 
URL will be in the format + https://..kusto.windows.net. Type: string (or + Expression with resultType string) + :type endpoint: object + :param service_principal_id: Required. The ID of the service principal + used to authenticate against Azure Data Explorer. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The key of the service principal + used to authenticate against Kusto. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param tenant: Required. The name or ID of the tenant to which the service + principal belongs. Type: string (or Expression with resultType string). + :type tenant: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'database': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + } + + def __init__(self, *, endpoint, service_principal_id, service_principal_key, database, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.database = database + self.tenant = tenant + self.type = 'AzureDataExplorer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py new file mode 100644 index 000000000000..5c204ab769e4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureDataExplorerSink(CopySink): + """A copy activity Azure Data Explorer sink. 
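A minimal construction sketch for the AzureDataExplorerLinkedService model above; the cluster URL, service principal values and resource names are placeholders, and SecureString is assumed to be the SecretBase implementation used for the key:

    from azure.mgmt.datafactory.models import (
        AzureDataExplorerLinkedService, SecureString)

    kusto_ls = AzureDataExplorerLinkedService(
        endpoint='https://mycluster.westus2.kusto.windows.net',  # placeholder cluster URL
        service_principal_id='00000000-0000-0000-0000-000000000000',
        service_principal_key=SecureString(value='<service-principal-key>'),
        database='mydb',
        tenant='mytenant.onmicrosoft.com')

    # adf_client is an authenticated DataFactoryManagementClient (not shown):
    # adf_client.linked_services.create_or_update(
    #     'my-rg', 'my-factory', 'AzureDataExplorerLS', kusto_ls)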
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param ingestion_mapping_name: A name of a pre-created csv mapping that + was defined on the target Kusto table. Type: string. + :type ingestion_mapping_name: object + :param ingestion_mapping_as_json: An explicit column mapping description + provided in a json format. Type: string. + :type ingestion_mapping_as_json: object + :param flush_immediately: If set to true, any aggregation will be skipped. + Default is false. Type: boolean. + :type flush_immediately: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, + 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, + 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerSink, self).__init__(**kwargs) + self.ingestion_mapping_name = kwargs.get('ingestion_mapping_name', None) + self.ingestion_mapping_as_json = kwargs.get('ingestion_mapping_as_json', None) + self.flush_immediately = kwargs.get('flush_immediately', None) + self.type = 'AzureDataExplorerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py new file mode 100644 index 000000000000..e5cb67bc79b8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureDataExplorerSink(CopySink): + """A copy activity Azure Data Explorer sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param ingestion_mapping_name: A name of a pre-created csv mapping that + was defined on the target Kusto table. Type: string. + :type ingestion_mapping_name: object + :param ingestion_mapping_as_json: An explicit column mapping description + provided in a json format. Type: string. + :type ingestion_mapping_as_json: object + :param flush_immediately: If set to true, any aggregation will be skipped. + Default is false. Type: boolean. 
+ :type flush_immediately: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, + 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, + 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None, **kwargs) -> None: + super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.ingestion_mapping_name = ingestion_mapping_name + self.ingestion_mapping_as_json = ingestion_mapping_as_json + self.flush_immediately = flush_immediately + self.type = 'AzureDataExplorerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py new file mode 100644 index 000000000000..2caaa517efd5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureDataExplorerSource(CopySource): + """A copy activity Azure Data Explorer (Kusto) source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Required. Database query. 
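A brief sketch of the AzureDataExplorerSink above in use; the ingestion mapping name is a placeholder and must correspond to a CSV mapping pre-created on the target Kusto table:

    from azure.mgmt.datafactory.models import AzureDataExplorerSink

    # Ingest copied rows into Kusto via a pre-created CSV mapping,
    # flushing each batch immediately (skipping aggregation).
    kusto_sink = AzureDataExplorerSink(
        ingestion_mapping_name='MyCsvMapping',  # placeholder mapping name
        flush_immediately=True,
        write_batch_timeout='00:05:00')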
Should be a Kusto Query Language + (KQL) query. Type: string (or Expression with resultType string). + :type query: object + :param no_truncation: The name of the Boolean option that controls whether + truncation is applied to result-sets that go beyond a certain row-count + limit. + :type no_truncation: object + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. + :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + 'query': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.no_truncation = kwargs.get('no_truncation', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.type = 'AzureDataExplorerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py new file mode 100644 index 000000000000..55a6bc78ee04 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureDataExplorerSource(CopySource): + """A copy activity Azure Data Explorer (Kusto) source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Required. Database query. Should be a Kusto Query Language + (KQL) query. Type: string (or Expression with resultType string). 
+ :type query: object + :param no_truncation: The name of the Boolean option that controls whether + truncation is applied to result-sets that go beyond a certain row-count + limit. + :type no_truncation: object + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. + :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + 'query': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__(self, *, query, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, no_truncation=None, query_timeout=None, **kwargs) -> None: + super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.no_truncation = no_truncation + self.query_timeout = query_timeout + self.type = 'AzureDataExplorerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py new file mode 100644 index 000000000000..594d22171f48 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureDataExplorerTableDataset(Dataset): + """The Azure Data Explorer (Kusto) dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
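A brief sketch of the AzureDataExplorerSource defined above; the KQL text is illustrative only:

    from azure.mgmt.datafactory.models import AzureDataExplorerSource

    # Run a KQL query against the linked Kusto database, allowing large
    # result sets and capping execution at ten minutes.
    kusto_source = AzureDataExplorerSource(
        query='StormEvents | where StartTime > ago(7d) | take 1000',
        no_truncation=True,
        query_timeout='00:10:00')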
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table: The table name of the Azure Data Explorer database. Type: + string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerTableDataset, self).__init__(**kwargs) + self.table = kwargs.get('table', None) + self.type = 'AzureDataExplorerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py new file mode 100644 index 000000000000..d36b0f39c2fe --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureDataExplorerTableDataset(Dataset): + """The Azure Data Explorer (Kusto) dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table: The table name of the Azure Data Explorer database. Type: + string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table=None, **kwargs) -> None: + super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table = table + self.type = 'AzureDataExplorerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py index 73ec2b6f9de9..0381e1b1de65 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py @@ -29,7 +29,7 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py index b6c4b993cae7..93250e2cef76 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. 
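A minimal sketch declaring the new AzureDataExplorerTableDataset against a Kusto linked service; the linked service reference and table names are placeholders:

    from azure.mgmt.datafactory.models import (
        AzureDataExplorerTableDataset, LinkedServiceReference)

    kusto_table_ds = AzureDataExplorerTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='AzureDataExplorerLS'),
        table='StormEvents')  # placeholder table name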
:type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py index e0299ba2bcad..de15057f78ed 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py @@ -43,8 +43,8 @@ class AzureDataLakeStoreDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param folder_path: Required. Path to the folder in the Azure Data Lake - Store. Type: string (or Expression with resultType string). + :param folder_path: Path to the folder in the Azure Data Lake Store. Type: + string (or Expression with resultType string). :type folder_path: object :param file_name: The name of the file in the Azure Data Lake Store. Type: string (or Expression with resultType string). @@ -59,7 +59,6 @@ class AzureDataLakeStoreDataset(Dataset): _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'folder_path': {'required': True}, } _attribute_map = { diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py index 62e761dc9695..d2df0ffebe7e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py @@ -43,8 +43,8 @@ class AzureDataLakeStoreDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param folder_path: Required. Path to the folder in the Azure Data Lake - Store. Type: string (or Expression with resultType string). + :param folder_path: Path to the folder in the Azure Data Lake Store. Type: + string (or Expression with resultType string). :type folder_path: object :param file_name: The name of the file in the Azure Data Lake Store. Type: string (or Expression with resultType string). 
@@ -59,7 +59,6 @@ class AzureDataLakeStoreDataset(Dataset): _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'folder_path': {'required': True}, } _attribute_map = { @@ -78,7 +77,7 @@ class AzureDataLakeStoreDataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, *, linked_service_name, folder_path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, file_name=None, format=None, compression=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: super(AzureDataLakeStoreDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.folder_path = folder_path self.file_name = file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py index 0c39866887ef..f08e086cb500 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py @@ -29,7 +29,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py index 10e3b72e654e..7b8ab293c0cf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py index ceaabf438097..145c7c61358a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py @@ -34,6 +34,10 @@ class AzureDataLakeStoreSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
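With folder_path no longer required on AzureDataLakeStoreDataset (per the validation change above), the dataset can be declared without a fixed path or with one resolved at runtime; a minimal sketch, using a placeholder linked service name and the JSON expression shape the service accepts for object-typed properties:

    from azure.mgmt.datafactory.models import (
        AzureDataLakeStoreDataset, LinkedServiceReference)

    # folder_path may now be omitted entirely...
    adls_ds = AzureDataLakeStoreDataset(
        linked_service_name=LinkedServiceReference(reference_name='AzureDataLakeStoreLS'))

    # ...or supplied as an expression evaluated per pipeline run.
    adls_ds_dynamic = AzureDataLakeStoreDataset(
        linked_service_name=LinkedServiceReference(reference_name='AzureDataLakeStoreLS'),
        folder_path={'value': "@pipeline().parameters.outputFolder", 'type': 'Expression'})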
:type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. Possible @@ -52,6 +56,7 @@ class AzureDataLakeStoreSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py index 449c7b0a2a3e..d3e16339fef2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py @@ -34,6 +34,10 @@ class AzureDataLakeStoreSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. 
Possible @@ -52,11 +56,12 @@ class AzureDataLakeStoreSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, copy_behavior=None, **kwargs) -> None: - super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.copy_behavior = copy_behavior self.type = 'AzureDataLakeStoreSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py index 60a6599c8fbb..9d2046049a30 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py @@ -27,6 +27,10 @@ class AzureDataLakeStoreSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read @@ -43,6 +47,7 @@ class AzureDataLakeStoreSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py index d228d787bff4..e1d883972220 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py @@ -27,6 +27,10 @@ class AzureDataLakeStoreSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
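The max_concurrent_connections property surfaced here applies to both the Data Lake Store sink and source; a brief sketch, assuming 'PreserveHierarchy' as the copy behavior value:

    from azure.mgmt.datafactory.models import (
        AzureDataLakeStoreSink, AzureDataLakeStoreSource)

    # Limit both ends of the copy to four concurrent connections to the store.
    adls_sink = AzureDataLakeStoreSink(
        copy_behavior='PreserveHierarchy',
        max_concurrent_connections=4)

    adls_source = AzureDataLakeStoreSource(
        recursive=True,
        max_concurrent_connections=4)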
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read @@ -43,11 +47,12 @@ class AzureDataLakeStoreSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, recursive=None, **kwargs) -> None: - super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: + super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.type = 'AzureDataLakeStoreSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py index c036b299fff0..6cc4c12674cb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py @@ -29,7 +29,7 @@ class AzureDatabricksLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -64,6 +64,16 @@ class AzureDatabricksLinkedService(LinkedService): :type new_cluster_spark_env_vars: dict[str, object] :param new_cluster_custom_tags: Additional tags for cluster resources. :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_driver_node_type: The driver node type for the new + cluster. Type: string (or Expression with resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for + the new cluster. Type: array of strings (or Expression with resultType + array of strings). + :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new + cluster. Type: boolean (or Expression with resultType boolean). + :type new_cluster_enable_elastic_disk: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -92,6 +102,9 @@ class AzureDatabricksLinkedService(LinkedService): 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -106,5 +119,8 @@ def __init__(self, **kwargs): self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) + self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) + self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) + self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureDatabricks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py index 8060311a4e0d..6299dac1e3f2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureDatabricksLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -64,6 +64,16 @@ class AzureDatabricksLinkedService(LinkedService): :type new_cluster_spark_env_vars: dict[str, object] :param new_cluster_custom_tags: Additional tags for cluster resources. :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_driver_node_type: The driver node type for the new + cluster. Type: string (or Expression with resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for + the new cluster. Type: array of strings (or Expression with resultType + array of strings). + :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new + cluster. Type: boolean (or Expression with resultType boolean). + :type new_cluster_enable_elastic_disk: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -92,10 +102,13 @@ class AzureDatabricksLinkedService(LinkedService): 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, domain, access_token, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, existing_cluster_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, new_cluster_spark_env_vars=None, new_cluster_custom_tags=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, domain, access_token, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, existing_cluster_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, new_cluster_spark_env_vars=None, new_cluster_custom_tags=None, new_cluster_driver_node_type=None, new_cluster_init_scripts=None, new_cluster_enable_elastic_disk=None, encrypted_credential=None, **kwargs) -> None: super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.domain = domain self.access_token = access_token @@ -106,5 +119,8 @@ def __init__(self, *, domain, access_token, additional_properties=None, connect_ self.new_cluster_spark_conf = new_cluster_spark_conf self.new_cluster_spark_env_vars = new_cluster_spark_env_vars self.new_cluster_custom_tags = new_cluster_custom_tags + self.new_cluster_driver_node_type = new_cluster_driver_node_type + self.new_cluster_init_scripts = new_cluster_init_scripts + self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk self.encrypted_credential = encrypted_credential self.type = 'AzureDatabricks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py index 44917d8d23b9..2ed5b870a778 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py @@ -29,7 +29,7 @@ class AzureFunctionLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
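A minimal sketch exercising the three new job-cluster properties on AzureDatabricksLinkedService; the workspace domain, access token and DBFS script path are placeholders:

    from azure.mgmt.datafactory.models import (
        AzureDatabricksLinkedService, SecureString)

    databricks_ls = AzureDatabricksLinkedService(
        domain='https://westus2.azuredatabricks.net',        # placeholder workspace URL
        access_token=SecureString(value='<databricks-pat>'),
        new_cluster_version='5.3.x-scala2.11',
        new_cluster_node_type='Standard_DS3_v2',
        new_cluster_num_of_worker='2:4',                      # autoscale range as a string
        new_cluster_driver_node_type='Standard_DS4_v2',
        new_cluster_init_scripts=['dbfs:/init/install-libs.sh'],  # placeholder script path
        new_cluster_enable_elastic_disk=True)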
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py index b6b0f9600da1..a1bfdbe8b6c1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureFunctionLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py index c7ad622591ee..768f0d83ae93 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py @@ -29,7 +29,7 @@ class AzureKeyVaultLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py index e13cf7fb527a..50f4a58a5a1b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureKeyVaultLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py index a6a19be4069b..08dfec98a6bf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py @@ -29,7 +29,7 @@ class AzureMLLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py index 0fff3cea9b8a..c77a692adc03 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureMLLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py index 64a072f1f38b..aedbdbb73eb5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py @@ -29,7 +29,7 @@ class AzureMySqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py index dcf4861da573..57692275f564 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureMySqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py index 7409be73bd09..823336432567 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py @@ -27,6 +27,10 @@ class AzureMySqlSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. 
Type: string (or Expression with resultType @@ -42,6 +46,7 @@ class AzureMySqlSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py index 4e1d35981f78..7030738d2615 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py @@ -27,6 +27,10 @@ class AzureMySqlSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType @@ -42,11 +46,12 @@ class AzureMySqlSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'AzureMySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py index 89c9b29cdcde..92359d6d6a10 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py @@ -29,7 +29,7 @@ class AzurePostgreSqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
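Editor's note: the hunks above add a maxConcurrentConnections setting to the copy source models, and the AzureMySqlSource py3 constructor now accepts it as a keyword argument. A minimal sketch of how a caller might set it, assuming the package is installed; the query text, retry values and connection limit are illustrative placeholders, not taken from the patch:

from azure.mgmt.datafactory.models import AzureMySqlSource

# Limit the copy source to two parallel connections against Azure MySQL.
# max_concurrent_connections is typed as `object`, so an ADF expression
# could be passed instead of a literal integer.
source = AzureMySqlSource(
    query="SELECT * FROM sales_orders",   # illustrative query
    source_retry_count=3,
    source_retry_wait="00:00:30",
    max_concurrent_connections=2,         # new in this patch
)
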
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py index e885498530ed..47f8f17980f8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py @@ -29,7 +29,7 @@ class AzurePostgreSqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py index 816e066ecebb..e0cd62fd8028 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py @@ -27,6 +27,10 @@ class AzurePostgreSqlSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class AzurePostgreSqlSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py index 2af53cf91da2..0362b0dca390 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py @@ -27,6 +27,10 @@ class AzurePostgreSqlSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class AzurePostgreSqlSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'AzurePostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py index 5ecb911fb94a..9f3a63db4978 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py @@ -34,6 +34,10 @@ class AzureQueueSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str """ @@ -48,6 +52,7 @@ class AzureQueueSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py index debc14c0c7e1..db2fb60ddb1e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py @@ -34,6 +34,10 @@ class AzureQueueSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str """ @@ -48,9 +52,10 @@ class AzureQueueSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, **kwargs) -> None: - super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'AzureQueueSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py index c09cd94bfb51..9aae64af8da0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py @@ -34,6 +34,10 @@ class AzureSearchIndexSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specify the write behavior when upserting documents @@ -52,6 +56,7 @@ class AzureSearchIndexSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py index 9ed48b36a588..3cd887a2512c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py @@ -34,6 +34,10 @@ class AzureSearchIndexSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). 
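Editor's note: the same maxConcurrentConnections knob is added on the sink side, and AzureQueueSink's py3 constructor above now forwards it to CopySink. A small sketch with placeholder retry and connection values:

from azure.mgmt.datafactory.models import AzureQueueSink

# Cap the number of simultaneous connections the copy sink opens against
# the target queue; the existing sink retry settings are unchanged.
sink = AzureQueueSink(
    sink_retry_count=2,
    sink_retry_wait="00:00:10",
    max_concurrent_connections=4,  # new in this patch
)
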
+ :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specify the write behavior when upserting documents @@ -52,11 +56,12 @@ class AzureSearchIndexSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, write_behavior=None, **kwargs) -> None: - super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.type = 'AzureSearchIndexSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py index 18979ed87ca0..782799cd5b28 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py @@ -29,7 +29,7 @@ class AzureSearchLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py index 6cc3cdc98b89..8589c3aead91 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureSearchLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py index 68ad549ed733..0da66637a04f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py @@ -29,7 +29,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py index afd58ae43354..dbcf6c88b134 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py index d4aa961cb424..cc7c9d58d19f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py @@ -29,7 +29,7 @@ class AzureSqlDWLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py index a78551dff273..5c75f3904b37 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureSqlDWLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
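Editor's note: the recurring one-word change in these linked-service models corrects the annotations docstring from "Dataset" to "linked service"; the parameter itself is unchanged. For context, a hedged sketch of attaching annotations when building one of the affected models; the connection string and tag values are placeholders:

from azure.mgmt.datafactory.models import AzureSqlDatabaseLinkedService

# `annotations` is a free-form list of tags describing the linked service
# (not the dataset, as the old docstring claimed).
linked_service = AzureSqlDatabaseLinkedService(
    connection_string="Server=tcp:myserver.database.windows.net;Database=mydb;",  # placeholder
    annotations=["team:data-platform", "env:dev"],
)
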
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py index 711b09a80004..202dd7229b90 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py @@ -29,7 +29,7 @@ class AzureStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py index 428fb82e871a..4fac19b70849 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py index faba497cc734..3459c9ad3ba1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py @@ -34,6 +34,10 @@ class AzureTableSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str :param azure_table_default_partition_key_value: Azure Table default @@ -60,6 +64,7 @@ class AzureTableSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py index 630df4f1f606..a15247544879 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py @@ -34,6 +34,10 @@ class AzureTableSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param azure_table_default_partition_key_value: Azure Table default @@ -60,6 +64,7 @@ class AzureTableSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, @@ -67,8 +72,8 @@ class AzureTableSink(CopySink): 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None: - super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None: + super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.azure_table_default_partition_key_value = 
azure_table_default_partition_key_value self.azure_table_partition_key_name = azure_table_partition_key_name self.azure_table_row_key_name = azure_table_row_key_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py index f4046c989f4e..fa7ead73eaa9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py @@ -27,6 +27,10 @@ class AzureTableSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param azure_table_source_query: Azure Table source query. Type: string @@ -46,6 +50,7 @@ class AzureTableSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py index 30ca05775f27..efbac5613219 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py @@ -27,6 +27,10 @@ class AzureTableSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param azure_table_source_query: Azure Table source query. 
Type: string @@ -46,13 +50,14 @@ class AzureTableSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None: - super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None: + super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.azure_table_source_query = azure_table_source_query self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found self.type = 'AzureTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py index 152fae6368a6..c2a8c2498ea6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py @@ -29,7 +29,7 @@ class AzureTableStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py index 533ad3509483..8d4e62c4f3e6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureTableStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py index 681cc44d278b..673d34167fed 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py @@ -30,6 +30,9 @@ class BlobEventsTrigger(MultiplePipelineTrigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -64,6 +67,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py index 08d9c542f4af..fb65a420a2cd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py @@ -30,6 +30,9 @@ class BlobEventsTrigger(MultiplePipelineTrigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. 
@@ -64,6 +67,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, @@ -72,8 +76,8 @@ class BlobEventsTrigger(MultiplePipelineTrigger): 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, } - def __init__(self, *, events, scope: str, additional_properties=None, description: str=None, pipelines=None, blob_path_begins_with: str=None, blob_path_ends_with: str=None, **kwargs) -> None: - super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, pipelines=pipelines, **kwargs) + def __init__(self, *, events, scope: str, additional_properties=None, description: str=None, annotations=None, pipelines=None, blob_path_begins_with: str=None, blob_path_ends_with: str=None, **kwargs) -> None: + super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) self.blob_path_begins_with = blob_path_begins_with self.blob_path_ends_with = blob_path_ends_with self.events = events diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py index fe90f5836faf..8a050cf9cc64 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py @@ -34,6 +34,10 @@ class BlobSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param blob_writer_overwrite_files: Blob writer overwrite files. Type: @@ -61,6 +65,7 @@ class BlobSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py index 1d6ac96aff6e..8fca0ac5cacc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py @@ -34,6 +34,10 @@ class BlobSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
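Editor's note: BlobEventsTrigger (and BlobTrigger below) gains an annotations list that is now serialized and accepted by the py3 constructor shown in the hunk above. A sketch in which the event type, scope resource ID, blob path and tags are all placeholder values:

from azure.mgmt.datafactory.models import BlobEventsTrigger

trigger = BlobEventsTrigger(
    events=["Microsoft.Storage.BlobCreated"],   # placeholder event type
    scope=("/subscriptions/<sub-id>/resourceGroups/<rg>"
           "/providers/Microsoft.Storage/storageAccounts/<account>"),  # placeholder scope
    blob_path_begins_with="/landing/blobs/",
    annotations=["source:event-grid", "owner:ingest-team"],  # newly supported by this patch
)
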
:type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param blob_writer_overwrite_files: Blob writer overwrite files. Type: @@ -61,6 +65,7 @@ class BlobSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, @@ -68,8 +73,8 @@ class BlobSink(CopySink): 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None: - super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None: + super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.blob_writer_overwrite_files = blob_writer_overwrite_files self.blob_writer_date_time_format = blob_writer_date_time_format self.blob_writer_add_header = blob_writer_add_header diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py index f563d0af1e2d..ab4313a890cb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py @@ -27,6 +27,10 @@ class BlobSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param treat_empty_as_null: Treat empty as null. 
Type: boolean (or @@ -49,6 +53,7 @@ class BlobSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py index 5b9dc775f069..78d90cc61e13 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py @@ -27,6 +27,10 @@ class BlobSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param treat_empty_as_null: Treat empty as null. Type: boolean (or @@ -49,14 +53,15 @@ class BlobSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: - super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: + super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.treat_empty_as_null = treat_empty_as_null self.skip_header_line_count = skip_header_line_count self.recursive = recursive diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py index 6abdece68966..4fd5b5c17100 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py @@ -30,6 +30,9 @@ class BlobTrigger(MultiplePipelineTrigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + 
:param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -59,6 +62,7 @@ class BlobTrigger(MultiplePipelineTrigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py index 2c80ac605368..cccffd881bfb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py @@ -30,6 +30,9 @@ class BlobTrigger(MultiplePipelineTrigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -59,6 +62,7 @@ class BlobTrigger(MultiplePipelineTrigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, @@ -66,8 +70,8 @@ class BlobTrigger(MultiplePipelineTrigger): 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, } - def __init__(self, *, folder_path: str, max_concurrency: int, linked_service, additional_properties=None, description: str=None, pipelines=None, **kwargs) -> None: - super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, pipelines=pipelines, **kwargs) + def __init__(self, *, folder_path: str, max_concurrency: int, linked_service, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) self.folder_path = folder_path self.max_concurrency = max_concurrency self.linked_service = linked_service diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py index 974ce49a1c62..ebba2be42028 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py @@ -29,7 +29,7 @@ class CassandraLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List 
of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py index dbc74f10002f..f22f303cc61d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py @@ -29,7 +29,7 @@ class CassandraLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py index fdd0a228d001..e7ba96c18682 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py @@ -27,6 +27,10 @@ class CassandraSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Should be a SQL-92 query expression or @@ -53,6 +57,7 @@ class CassandraSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py index 323d85d1e742..bd95d158b868 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py @@ -27,6 +27,10 @@ class CassandraSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. 
Should be a SQL-92 query expression or @@ -53,13 +57,14 @@ class CassandraSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, consistency_level=None, **kwargs) -> None: - super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, consistency_level=None, **kwargs) -> None: + super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.consistency_level = consistency_level self.type = 'CassandraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py index 7b85f1400ff6..04179d0d1f53 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py @@ -29,7 +29,7 @@ class ConcurLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py index 6e17a2c9cc8e..4411db6d2856 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py @@ -29,7 +29,7 @@ class ConcurLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py index f8053415520c..11ae557c0cda 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py @@ -27,6 +27,10 @@ class ConcurSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
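Editor's note: CassandraSource follows the same pattern as the other copy sources: max_concurrent_connections is appended to the serialization map and constructor next to the existing query and consistency_level parameters. A brief sketch with illustrative values:

from azure.mgmt.datafactory.models import CassandraSource

source = CassandraSource(
    query="SELECT * FROM telemetry.readings",  # SQL-92 style query, illustrative
    consistency_level="LOCAL_QUORUM",          # pre-existing option, unchanged by this patch
    max_concurrent_connections=2,              # new in this patch
)
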
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class ConcurSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py index db9104869417..ac8ae8fb5a91 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py @@ -27,6 +27,10 @@ class ConcurSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class ConcurSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'ConcurSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py index 16581581786b..2242bc36beb2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py @@ -16,9 +16,9 @@ class ControlActivity(Activity): """Base class for all control activities like IfCondition, ForEach , Until. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AppendVariableActivity, SetVariableActivity, - FilterActivity, UntilActivity, WaitActivity, ForEachActivity, - IfConditionActivity, ExecutePipelineActivity + sub-classes are: WebHookActivity, AppendVariableActivity, + SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity, + WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity All required parameters must be populated in order to send to Azure. @@ -52,7 +52,7 @@ class ControlActivity(Activity): } _subtype_map = { - 'type': {'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} + 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py index 739d8b9c311b..0aabd99d741f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py @@ -16,9 +16,9 @@ class ControlActivity(Activity): """Base class for all control activities like IfCondition, ForEach , Until. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AppendVariableActivity, SetVariableActivity, - FilterActivity, UntilActivity, WaitActivity, ForEachActivity, - IfConditionActivity, ExecutePipelineActivity + sub-classes are: WebHookActivity, AppendVariableActivity, + SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity, + WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity All required parameters must be populated in order to send to Azure. 
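The discriminator maps above drive polymorphic deserialization: a payload whose type field is 'WebHook' or 'Validation' now resolves to the corresponding new activity class. A minimal sketch of that lookup, assuming the new classes are exported from azure.mgmt.datafactory.models (it reads the private _subtype_map for illustration only):

    from azure.mgmt.datafactory import models

    def resolve_control_activity_class(payload):
        # ControlActivity._subtype_map['type'] maps discriminator values such as
        # 'WebHook' and 'Validation' (added here) to model class names.
        subtype = models.ControlActivity._subtype_map['type'].get(payload.get('type'))
        return getattr(models, subtype) if subtype else models.ControlActivity

    # A 'WebHook' payload now maps to WebHookActivity instead of falling back to ControlActivity.
    print(resolve_control_activity_class({'type': 'WebHook', 'name': 'hook1'}).__name__)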
@@ -52,7 +52,7 @@ class ControlActivity(Activity): } _subtype_map = { - 'type': {'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} + 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} } def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py index 9182efe2469a..f73d34fcb3ce 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py @@ -65,6 +65,8 @@ class CopyActivity(ExecutionActivity): settings when EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :param preserve_rules: Preserve Rules. + :type preserve_rules: list[object] :param inputs: List of inputs for the activity. :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity. @@ -96,6 +98,7 @@ class CopyActivity(ExecutionActivity): 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } @@ -111,6 +114,7 @@ def __init__(self, **kwargs): self.data_integration_units = kwargs.get('data_integration_units', None) self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) + self.preserve_rules = kwargs.get('preserve_rules', None) self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) self.type = 'Copy' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py index fd663bd71dc6..a02cd5d89e10 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py @@ -65,6 +65,8 @@ class CopyActivity(ExecutionActivity): settings when EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :param preserve_rules: Preserve Rules. + :type preserve_rules: list[object] :param inputs: List of inputs for the activity. 
:type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity. @@ -96,11 +98,12 @@ class CopyActivity(ExecutionActivity): 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } - def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, inputs=None, outputs=None, **kwargs) -> None: + def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, inputs=None, outputs=None, **kwargs) -> None: super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.source = source self.sink = sink @@ -111,6 +114,7 @@ def __init__(self, *, name: str, source, sink, additional_properties=None, descr self.data_integration_units = data_integration_units self.enable_skip_incompatible_row = enable_skip_incompatible_row self.redirect_incompatible_row_settings = redirect_incompatible_row_settings + self.preserve_rules = preserve_rules self.inputs = inputs self.outputs = outputs self.type = 'Copy' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py index 58b55bf39bbc..6f714f7947d1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py @@ -16,10 +16,11 @@ class CopySink(Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SalesforceSink, DynamicsSink, OdbcSink, - AzureSearchIndexSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, - SqlSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, - AzureTableSink, AzureQueueSink, SapCloudForCustomerSink + sub-classes are: CosmosDbMongoDbApiSink, SalesforceSink, + AzureDataExplorerSink, DynamicsSink, OdbcSink, AzureSearchIndexSink, + AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlSink, + DocumentDbCollectionSink, FileSystemSink, BlobSink, AzureTableSink, + AzureQueueSink, SapCloudForCustomerSink All required parameters must be populated in order to send to Azure. @@ -40,6 +41,10 @@ class CopySink(Model): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str """ @@ -54,11 +59,12 @@ class CopySink(Model): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { - 'type': {'SalesforceSink': 'SalesforceSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink'} } def __init__(self, **kwargs): @@ -68,4 +74,5 @@ def __init__(self, **kwargs): self.write_batch_timeout = kwargs.get('write_batch_timeout', None) self.sink_retry_count = kwargs.get('sink_retry_count', None) self.sink_retry_wait = kwargs.get('sink_retry_wait', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py index 02dfd30c931e..ea4f320e9bc6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py @@ -16,10 +16,11 @@ class CopySink(Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SalesforceSink, DynamicsSink, OdbcSink, - AzureSearchIndexSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, - SqlSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, - AzureTableSink, AzureQueueSink, SapCloudForCustomerSink + sub-classes are: CosmosDbMongoDbApiSink, SalesforceSink, + AzureDataExplorerSink, DynamicsSink, OdbcSink, AzureSearchIndexSink, + AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlSink, + DocumentDbCollectionSink, FileSystemSink, BlobSink, AzureTableSink, + AzureQueueSink, SapCloudForCustomerSink All required parameters must be populated in order to send to Azure. 
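A usage sketch of the new sink property, assuming BlobSink (one of the sub-classes listed above) forwards the base-class keyword arguments documented here:

    from azure.mgmt.datafactory.models import BlobSink

    sink = BlobSink(
        write_batch_size=100,          # integer (or Expression with resultType integer), minimum 0
        sink_retry_count=3,
        sink_retry_wait='00:00:30',    # string matching the timespan pattern above
        max_concurrent_connections=4,  # new: cap on concurrent connections to the sink data store
    )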
@@ -40,6 +41,10 @@ class CopySink(Model): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str """ @@ -54,18 +59,20 @@ class CopySink(Model): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { - 'type': {'SalesforceSink': 'SalesforceSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink'} } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: super(CopySink, self).__init__(**kwargs) self.additional_properties = additional_properties self.write_batch_size = write_batch_size self.write_batch_timeout = write_batch_timeout self.sink_retry_count = sink_retry_count self.sink_retry_wait = sink_retry_wait + self.max_concurrent_connections = max_concurrent_connections self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py index 9a11107fc8e8..4f3da1e8cf85 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py @@ -16,19 +16,21 @@ class CopySource(Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonRedshiftSource, ResponsysSource, + sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, + OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, HubspotSource, HiveSource, HBaseSource, GreenplumSource, GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, ConcurSource, - AzurePostgreSqlSource, AmazonMWSSource, HttpSource, - AzureDataLakeStoreSource, MongoDbSource, CassandraSource, WebSource, - OracleSource, AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, - SqlSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource, - RelationalSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, - AzureTableSource + AzurePostgreSqlSource, AmazonMWSSource, HttpSource, AzureBlobFSSource, + AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, + MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, + AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, + SqlDWSource, SqlSource, RestSource, SapOpenHubSource, SapEccSource, + SapCloudForCustomerSource, SalesforceSource, RelationalSource, + DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource All required parameters must be populated in order to send to Azure. @@ -42,6 +44,10 @@ class CopySource(Model): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str """ @@ -54,11 +60,12 @@ class CopySource(Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 
'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'} } def __init__(self, **kwargs): @@ -66,4 +73,5 @@ def __init__(self, **kwargs): self.additional_properties = kwargs.get('additional_properties', None) self.source_retry_count = kwargs.get('source_retry_count', None) self.source_retry_wait = kwargs.get('source_retry_wait', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py index 7c1a96b2897a..eb439548481a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py @@ -16,19 +16,21 @@ class CopySource(Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonRedshiftSource, ResponsysSource, + sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, + OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, HubspotSource, HiveSource, HBaseSource, GreenplumSource, GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, ConcurSource, - AzurePostgreSqlSource, AmazonMWSSource, HttpSource, - AzureDataLakeStoreSource, MongoDbSource, CassandraSource, WebSource, - OracleSource, AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, - SqlSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource, - RelationalSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, - AzureTableSource + AzurePostgreSqlSource, AmazonMWSSource, HttpSource, AzureBlobFSSource, + AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, + MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, + AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, + SqlDWSource, SqlSource, RestSource, SapOpenHubSource, SapEccSource, + SapCloudForCustomerSource, SalesforceSource, RelationalSource, + DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource All required parameters must be populated in order to send to Azure. 
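The same maxConcurrentConnections property is added on the source side, and CopyActivity gains preserveRules (see the copy_activity changes above). A hedged end-to-end sketch; the dataset reference names and the preserve-rule value are placeholders, not taken from this change:

    from azure.mgmt.datafactory.models import (
        BlobSink, BlobSource, CopyActivity, DatasetReference)

    copy_activity = CopyActivity(
        name='CopyWithNewProperties',
        source=BlobSource(max_concurrent_connections=8),  # new CopySource property
        sink=BlobSink(max_concurrent_connections=4),       # new CopySink property
        preserve_rules=['<preserve rule>'],                 # new typeProperties.preserveRules list (placeholder)
        inputs=[DatasetReference(reference_name='InputDataset')],
        outputs=[DatasetReference(reference_name='OutputDataset')],
    )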
@@ -42,6 +44,10 @@ class CopySource(Model): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str """ @@ -54,16 +60,18 @@ class CopySource(Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 
'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'} } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: super(CopySource, self).__init__(**kwargs) self.additional_properties = additional_properties self.source_retry_count = source_retry_count self.source_retry_wait = source_retry_wait + self.max_concurrent_connections = max_concurrent_connections self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py index ed9136eee5fe..6a8a462f6f46 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py @@ -29,7 +29,7 @@ class CosmosDbLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py index 3b951a68a65a..57dab80e06b9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py @@ -29,7 +29,7 @@ class CosmosDbLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py new file mode 100644 index 000000000000..d86648eb5eee --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class CosmosDbMongoDbApiCollectionDataset(Dataset): + """The CosmosDB (MongoDB API) database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the CosmosDB (MongoDB + API) database. Type: string (or Expression with resultType string). 
+ :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs) + self.collection = kwargs.get('collection', None) + self.type = 'CosmosDbMongoDbApiCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py new file mode 100644 index 000000000000..de2650fa14b4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CosmosDbMongoDbApiCollectionDataset(Dataset): + """The CosmosDB (MongoDB API) database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the CosmosDB (MongoDB + API) database. Type: string (or Expression with resultType string). 
+ :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.collection = collection + self.type = 'CosmosDbMongoDbApiCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py new file mode 100644 index 000000000000..f76e7c5f5a41 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class CosmosDbMongoDbApiLinkedService(LinkedService): + """Linked service for CosmosDB (MongoDB API) data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The CosmosDB (MongoDB API) connection + string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. 
The name of the CosmosDB (MongoDB API) database + that you want to access. Type: string (or Expression with resultType + string). + :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.database = kwargs.get('database', None) + self.type = 'CosmosDbMongoDbApi' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py new file mode 100644 index 000000000000..2a72bfce4ff2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CosmosDbMongoDbApiLinkedService(LinkedService): + """Linked service for CosmosDB (MongoDB API) data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The CosmosDB (MongoDB API) connection + string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the CosmosDB (MongoDB API) database + that you want to access. Type: string (or Expression with resultType + string). 
+ :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.database = database + self.type = 'CosmosDbMongoDbApi' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py new file mode 100644 index 000000000000..0d40b52add80 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class CosmosDbMongoDbApiSink(CopySink): + """A copy activity sink for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + :param write_behavior: Specifies whether the document with same key to be + overwritten (upsert) rather than throw exception (insert). The default + value is "insert". Type: string (or Expression with resultType string). + Type: string (or Expression with resultType string). + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) + self.type = 'CosmosDbMongoDbApiSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py new file mode 100644 index 000000000000..5db1ee5c9d36 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class CosmosDbMongoDbApiSink(CopySink): + """A copy activity sink for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: Specifies whether the document with same key to be + overwritten (upsert) rather than throw exception (insert). The default + value is "insert". 
Type: string (or Expression with resultType string). + Type: string (or Expression with resultType string). + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.type = 'CosmosDbMongoDbApiSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py new file mode 100644 index 000000000000..dae49c1d45eb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class CosmosDbMongoDbApiSource(CopySource): + """A copy activity source for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param filter: Specifies selection filter using query operators. To return + all documents in a collection, omit this parameter or pass an empty + document ({}). Type: string (or Expression with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. 
+ :type cursor_methods: + ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each + batch of the response from MongoDB instance. In most cases, modifying the + batch size will not affect the user or the application. This property's + main purpose is to avoid hit the limitation of response size. Type: + integer (or Expression with resultType integer). + :type batch_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiSource, self).__init__(**kwargs) + self.filter = kwargs.get('filter', None) + self.cursor_methods = kwargs.get('cursor_methods', None) + self.batch_size = kwargs.get('batch_size', None) + self.type = 'CosmosDbMongoDbApiSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py new file mode 100644 index 000000000000..a4c869645973 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class CosmosDbMongoDbApiSource(CopySource): + """A copy activity source for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param filter: Specifies selection filter using query operators. To return + all documents in a collection, omit this parameter or pass an empty + document ({}). Type: string (or Expression with resultType string). 
+ :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. + :type cursor_methods: + ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each + batch of the response from MongoDB instance. In most cases, modifying the + batch size will not affect the user or the application. This property's + main purpose is to avoid hit the limitation of response size. Type: + integer (or Expression with resultType integer). + :type batch_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None: + super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size + self.type = 'CosmosDbMongoDbApiSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py index f5c02a071718..76e45648f941 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py @@ -29,7 +29,7 @@ class CouchbaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py index 1507d6ab7b32..afe336f666de 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py @@ -29,7 +29,7 @@ class CouchbaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server.
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py index bfab638594a3..b355605417d1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py @@ -27,6 +27,10 @@ class CouchbaseSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class CouchbaseSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py index cc661253a13d..1358fc20feba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py @@ -27,6 +27,10 @@ class CouchbaseSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class CouchbaseSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'CouchbaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py index f7eceb72ff3b..01cfb7335d37 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py @@ -52,6 +52,9 @@ class CustomActivity(ExecutionActivity): custom activity has the full responsibility to consume and interpret the content defined. :type extended_properties: dict[str, object] + :param retention_time_in_days: The retention time for the files submitted + for custom activity. Type: double (or Expression with resultType double). + :type retention_time_in_days: object """ _validation = { @@ -74,6 +77,7 @@ class CustomActivity(ExecutionActivity): 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, + 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, } def __init__(self, **kwargs): @@ -83,4 +87,5 @@ def __init__(self, **kwargs): self.folder_path = kwargs.get('folder_path', None) self.reference_objects = kwargs.get('reference_objects', None) self.extended_properties = kwargs.get('extended_properties', None) + self.retention_time_in_days = kwargs.get('retention_time_in_days', None) self.type = 'Custom' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py index b82ac57bca4d..bf8326f053f2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py @@ -52,6 +52,9 @@ class CustomActivity(ExecutionActivity): custom activity has the full responsibility to consume and interpret the content defined. :type extended_properties: dict[str, object] + :param retention_time_in_days: The retention time for the files submitted + for custom activity. 
Type: double (or Expression with resultType double). + :type retention_time_in_days: object """ _validation = { @@ -74,13 +77,15 @@ class CustomActivity(ExecutionActivity): 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, + 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, } - def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, resource_linked_service=None, folder_path=None, reference_objects=None, extended_properties=None, **kwargs) -> None: + def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, resource_linked_service=None, folder_path=None, reference_objects=None, extended_properties=None, retention_time_in_days=None, **kwargs) -> None: super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.command = command self.resource_linked_service = resource_linked_service self.folder_path = folder_path self.reference_objects = reference_objects self.extended_properties = extended_properties + self.retention_time_in_days = retention_time_in_days self.type = 'Custom' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py index 4bc3a2863fc3..db14a05e7ad1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py @@ -29,7 +29,7 @@ class CustomDataSourceLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py index 2ec05f7a32d9..f7633ee28cbd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py @@ -29,7 +29,7 @@ class CustomDataSourceLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py index 8a6a8ac662a8..a242309c3fd1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py @@ -43,14 +43,13 @@ class CustomDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param type_properties: Required. Custom dataset properties. + :param type_properties: Custom dataset properties. :type type_properties: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'type_properties': {'required': True}, } _attribute_map = { diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py index da681e8360b8..c00dae2b2c56 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py @@ -43,14 +43,13 @@ class CustomDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param type_properties: Required. Custom dataset properties. + :param type_properties: Custom dataset properties. :type type_properties: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'type_properties': {'required': True}, } _attribute_map = { @@ -66,7 +65,7 @@ class CustomDataset(Dataset): 'type_properties': {'key': 'typeProperties', 'type': 'object'}, } - def __init__(self, *, linked_service_name, type_properties, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, type_properties=None, **kwargs) -> None: super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type_properties = type_properties self.type = 'CustomDataset' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py index 2992964b1799..b372cb91d8ef 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py @@ -77,6 +77,8 @@ class RunQueryFilterOperand(str, Enum): activity_type = "ActivityType" trigger_name = "TriggerName" trigger_run_timestamp = "TriggerRunTimestamp" + run_group_id = "RunGroupId" + latest_only = "LatestOnly" class RunQueryFilterOperator(str, Enum): @@ -158,6 +160,12 @@ class RecurrenceFrequency(str, Enum): year = "Year" +class GoogleAdWordsAuthenticationType(str, 
Enum): + + service_authentication = "ServiceAuthentication" + user_authentication = "UserAuthentication" + + class SparkServerType(str, Enum): shark_server = "SharkServer" @@ -267,6 +275,14 @@ class HttpAuthenticationType(str, Enum): client_certificate = "ClientCertificate" +class RestServiceAuthenticationType(str, Enum): + + anonymous = "Anonymous" + basic = "Basic" + aad_service_principal = "AadServicePrincipal" + managed_service_identity = "ManagedServiceIdentity" + + class MongoDbAuthenticationType(str, Enum): basic = "Basic" @@ -277,6 +293,15 @@ class ODataAuthenticationType(str, Enum): basic = "Basic" anonymous = "Anonymous" + windows = "Windows" + aad_service_principal = "AadServicePrincipal" + managed_service_identity = "ManagedServiceIdentity" + + +class ODataAadServicePrincipalCredentialType(str, Enum): + + service_principal_key = "ServicePrincipalKey" + service_principal_cert = "ServicePrincipalCert" class TeradataAuthenticationType(str, Enum): @@ -395,6 +420,11 @@ class SapCloudForCustomerSinkWriteBehavior(str, Enum): update = "Update" +class WebHookActivityMethod(str, Enum): + + post = "POST" + + class IntegrationRuntimeType(str, Enum): managed = "Managed" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py index de812815bd26..af540b1e6429 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -17,26 +17,29 @@ class Dataset(Model): data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ResponsysObjectDataset, - SalesforceMarketingCloudObjectDataset, VerticaTableDataset, - NetezzaTableDataset, ZohoObjectDataset, XeroObjectDataset, - SquareObjectDataset, SparkObjectDataset, ShopifyObjectDataset, - ServiceNowObjectDataset, QuickBooksObjectDataset, PrestoObjectDataset, - PhoenixObjectDataset, PaypalObjectDataset, MarketoObjectDataset, - MariaDBTableDataset, MagentoObjectDataset, JiraObjectDataset, - ImpalaObjectDataset, HubspotObjectDataset, HiveObjectDataset, - HBaseObjectDataset, GreenplumTableDataset, GoogleBigQueryObjectDataset, - EloquaObjectDataset, DrillTableDataset, CouchbaseTableDataset, - ConcurObjectDataset, AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, - HttpDataset, AzureSearchIndexDataset, WebTableDataset, - SqlServerTableDataset, SapEccResourceDataset, + sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, + OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, + ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, + VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, + XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, + ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, + PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, + MarketoObjectDataset, MariaDBTableDataset, MagentoObjectDataset, + JiraObjectDataset, ImpalaObjectDataset, HubspotObjectDataset, + HiveObjectDataset, HBaseObjectDataset, GreenplumTableDataset, + GoogleBigQueryObjectDataset, EloquaObjectDataset, DrillTableDataset, + CouchbaseTableDataset, ConcurObjectDataset, AzurePostgreSqlTableDataset, + AmazonMWSObjectDataset, HttpDataset, AzureSearchIndexDataset, + WebTableDataset, RestResourceDataset, SqlServerTableDataset, + SapOpenHubTableDataset, 
SapEccResourceDataset, SapCloudForCustomerResourceDataset, SalesforceObjectDataset, RelationalTableDataset, AzureMySqlTableDataset, OracleTableDataset, - ODataResourceDataset, MongoDbCollectionDataset, FileShareDataset, - AzureDataLakeStoreDataset, DynamicsEntityDataset, - DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, - AzureSqlDWTableDataset, AzureSqlTableDataset, AzureTableDataset, - AzureBlobDataset, AmazonS3Dataset + ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, + MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, + Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, + CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlTableDataset, + AzureTableDataset, AzureBlobDataset, AmazonS3Dataset All required parameters must be populated in order to send to Azure. @@ -86,7 +89,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 
'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py index 9538e6105a8f..adc64b228236 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py @@ -17,26 +17,29 @@ class Dataset(Model): data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: ResponsysObjectDataset, - SalesforceMarketingCloudObjectDataset, VerticaTableDataset, - NetezzaTableDataset, ZohoObjectDataset, XeroObjectDataset, - SquareObjectDataset, SparkObjectDataset, ShopifyObjectDataset, - ServiceNowObjectDataset, QuickBooksObjectDataset, PrestoObjectDataset, - PhoenixObjectDataset, PaypalObjectDataset, MarketoObjectDataset, - MariaDBTableDataset, MagentoObjectDataset, JiraObjectDataset, - ImpalaObjectDataset, HubspotObjectDataset, HiveObjectDataset, - HBaseObjectDataset, GreenplumTableDataset, GoogleBigQueryObjectDataset, - EloquaObjectDataset, DrillTableDataset, CouchbaseTableDataset, - ConcurObjectDataset, AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, - HttpDataset, AzureSearchIndexDataset, WebTableDataset, - SqlServerTableDataset, SapEccResourceDataset, + sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, + OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, + ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, + VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, + XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, + ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, + PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, + MarketoObjectDataset, MariaDBTableDataset, MagentoObjectDataset, + JiraObjectDataset, ImpalaObjectDataset, HubspotObjectDataset, + HiveObjectDataset, HBaseObjectDataset, GreenplumTableDataset, + GoogleBigQueryObjectDataset, EloquaObjectDataset, DrillTableDataset, + CouchbaseTableDataset, ConcurObjectDataset, AzurePostgreSqlTableDataset, + AmazonMWSObjectDataset, HttpDataset, AzureSearchIndexDataset, + WebTableDataset, RestResourceDataset, SqlServerTableDataset, + SapOpenHubTableDataset, SapEccResourceDataset, SapCloudForCustomerResourceDataset, SalesforceObjectDataset, RelationalTableDataset, AzureMySqlTableDataset, OracleTableDataset, - ODataResourceDataset, MongoDbCollectionDataset, FileShareDataset, - AzureDataLakeStoreDataset, DynamicsEntityDataset, - DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, - AzureSqlDWTableDataset, AzureSqlTableDataset, AzureTableDataset, - AzureBlobDataset, AmazonS3Dataset + ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, + MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, + Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, + CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlTableDataset, + AzureTableDataset, AzureBlobDataset, AmazonS3Dataset All required parameters must be populated in order to send to Azure. 
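[Reviewer note, not part of the generated patch] The widened subclass list above and the `_subtype_map` entries in the next hunk are what let the msrest deserializer resolve the wire-level `type` discriminator onto the dataset classes introduced by this PR. A minimal construction sketch, assuming the version of azure-mgmt-datafactory produced by this change is installed; the linked service reference name and entity path below are purely illustrative:

from azure.mgmt.datafactory.models import (
    DynamicsAXResourceDataset,
    LinkedServiceReference,
)

# Hypothetical reference to an existing Dynamics AX linked service in the factory.
ls_ref = LinkedServiceReference(reference_name="ExampleDynamicsAXLinkedService")

# 'path' is the OData entity name; the 'type' discriminator is filled in by the model itself.
dataset = DynamicsAXResourceDataset(linked_service_name=ls_ref, path="Customers")
assert dataset.type == "DynamicsAXResource"  # matches the new key added to _subtype_map below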
@@ -86,7 +89,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 
'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py index 9349bbcba5e0..d163d2b93c18 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py @@ -29,7 +29,7 @@ class Db2LinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py index d339860c3229..44d784fa9bde 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py @@ -29,7 +29,7 @@ class Db2LinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py index 43253aff51d0..25e80ee45466 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py @@ -34,6 +34,10 @@ class DocumentDbCollectionSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param nesting_separator: Nested properties separator. Default is . (dot). @@ -51,6 +55,7 @@ class DocumentDbCollectionSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py index 5377d4ed5aa5..111897036215 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py @@ -34,6 +34,10 @@ class DocumentDbCollectionSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param nesting_separator: Nested properties separator. Default is . (dot). 
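[Reviewer note, not part of the generated patch] The `maxConcurrentConnections` property being threaded through every copy source and sink in this change surfaces as an ordinary optional keyword argument on the models, and because it is typed as `object` it accepts either a literal value or a Data Factory expression. A small sketch under that assumption, using the DocumentDB sink whose updated constructor follows; the pipeline parameter name is made up:

from azure.mgmt.datafactory.models import DocumentDbCollectionSink

# Literal value: cap the sink at four concurrent connections to the data store.
sink = DocumentDbCollectionSink(nesting_separator=".", max_concurrent_connections=4)
assert sink.type == "DocumentDbCollectionSink"

# The same field can carry an ADF expression object instead of a literal.
parameterized_sink = DocumentDbCollectionSink(
    max_concurrent_connections={
        "value": "@pipeline().parameters.maxConnections",  # hypothetical pipeline parameter
        "type": "Expression",
    },
)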
@@ -51,11 +55,12 @@ class DocumentDbCollectionSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, nesting_separator=None, **kwargs) -> None: - super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, **kwargs) -> None: + super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.nesting_separator = nesting_separator self.type = 'DocumentDbCollectionSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py index ac6bd77955c8..9fdd23f2795f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py @@ -27,6 +27,10 @@ class DocumentDbCollectionSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Documents query. 
Type: string (or Expression with resultType @@ -45,6 +49,7 @@ class DocumentDbCollectionSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py index 9c20bfbfa132..9e0bf6382b04 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py @@ -27,6 +27,10 @@ class DocumentDbCollectionSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Documents query. Type: string (or Expression with resultType @@ -45,13 +49,14 @@ class DocumentDbCollectionSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, nesting_separator=None, **kwargs) -> None: - super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None, **kwargs) -> None: + super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.nesting_separator = nesting_separator self.type = 'DocumentDbCollectionSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py index 52ad5888f5f2..c5428ace02a2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py @@ -29,7 +29,7 @@ class DrillLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags 
that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py index b556d7e92be3..5fb0cb25ecdb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py @@ -29,7 +29,7 @@ class DrillLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py index c2e390308b81..9a3391f27786 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py @@ -27,6 +27,10 @@ class DrillSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class DrillSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py index ea67bbef64fb..313183abab83 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py @@ -27,6 +27,10 @@ class DrillSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class DrillSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'DrillSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py new file mode 100644 index 000000000000..5ff0b150718b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class DynamicsAXLinkedService(LinkedService): + """Dynamics AX linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The Dynamics AX (or Dynamics 365 Finance and + Operations) instance OData endpoint. + :type url: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + Mark this field as a SecureString to store it securely in Data Factory, or + reference a secret stored in Azure Key Vault. Type: string (or Expression + with resultType string). 
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: Required. Specify the tenant information (domain name or + tenant ID) under which your application resides. Retrieve it by hovering + the mouse in the top-right corner of the Azure portal. Type: string (or + Expression with resultType string). + :type tenant: object + :param aad_resource_id: Required. Specify the resource you are requesting + authorization. Type: string (or Expression with resultType string). + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'tenant': {'required': True}, + 'aad_resource_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsAXLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'DynamicsAX' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py new file mode 100644 index 000000000000..79d3a34ba313 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class DynamicsAXLinkedService(LinkedService): + """Dynamics AX linked service. + + All required parameters must be populated in order to send to Azure. 
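[Reviewer note, not part of the generated patch] The new Dynamics AX linked service authenticates with an AAD service principal, which is why url, servicePrincipalId, servicePrincipalKey, tenant and aadResourceId are all marked required in the class above. A rough construction sketch under that assumption; every value shown is a placeholder:

from azure.mgmt.datafactory.models import DynamicsAXLinkedService, SecureString

linked_service = DynamicsAXLinkedService(
    url="https://contoso.operations.dynamics.com/data",       # placeholder OData endpoint
    service_principal_id="<application-client-id>",
    service_principal_key=SecureString(value="<application-key>"),
    tenant="contoso.onmicrosoft.com",
    aad_resource_id="https://contoso.operations.dynamics.com",
)
assert linked_service.type == "DynamicsAX"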
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The Dynamics AX (or Dynamics 365 Finance and + Operations) instance OData endpoint. + :type url: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + Mark this field as a SecureString to store it securely in Data Factory, or + reference a secret stored in Azure Key Vault. Type: string (or Expression + with resultType string). + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: Required. Specify the tenant information (domain name or + tenant ID) under which your application resides. Retrieve it by hovering + the mouse in the top-right corner of the Azure portal. Type: string (or + Expression with resultType string). + :type tenant: object + :param aad_resource_id: Required. Specify the resource you are requesting + authorization. Type: string (or Expression with resultType string). + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'tenant': {'required': True}, + 'aad_resource_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, service_principal_id, service_principal_key, tenant, aad_resource_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: + super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id + self.encrypted_credential = encrypted_credential + self.type = 'DynamicsAX' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py new file mode 100644 index 000000000000..233c4c99d4df --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DynamicsAXResourceDataset(Dataset): + """The path of the Dynamics AX OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the Dynamics AX OData entity. Type: + string (or Expression with resultType string). + :type path: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DynamicsAXResourceDataset, self).__init__(**kwargs) + self.path = kwargs.get('path', None) + self.type = 'DynamicsAXResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py new file mode 100644 index 000000000000..788c9084ee9b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DynamicsAXResourceDataset(Dataset): + """The path of the Dynamics AX OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the Dynamics AX OData entity. Type: + string (or Expression with resultType string). + :type path: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'str'}, + } + + def __init__(self, *, linked_service_name, path: str, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.path = path + self.type = 'DynamicsAXResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py new file mode 100644 index 000000000000..619bad0f75c9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DynamicsAXSource(CopySource): + """A copy activity Dynamics AX source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsAXSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'DynamicsAXSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py new file mode 100644 index 000000000000..7679e68bae7b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DynamicsAXSource(CopySource): + """A copy activity Dynamics AX source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
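# A minimal usage sketch for the new Dynamics AX models added above, assuming they
# are exported from azure.mgmt.datafactory.models (the endpoint, tenant and linked
# service names below are illustrative placeholders, not values from this change):
from azure.mgmt.datafactory.models import (
    DynamicsAXLinkedService, DynamicsAXResourceDataset, DynamicsAXSource,
    LinkedServiceReference, SecureString)

# Linked service: service-principal credentials against the D365 F&O OData endpoint.
dynamics_ax_ls = DynamicsAXLinkedService(
    url='https://contoso.operations.dynamics.com/data',
    service_principal_id='<client-id>',
    service_principal_key=SecureString(value='<client-secret>'),
    tenant='contoso.onmicrosoft.com',
    aad_resource_id='https://contoso.operations.dynamics.com')

# Dataset: one OData entity, referencing the linked service above by its factory name.
dynamics_ax_ds = DynamicsAXResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='DynamicsAXLS'),
    path='Customers')

# Copy-activity source: query and max_concurrent_connections are both optional.
dynamics_ax_source = DynamicsAXSource(query='$top=100', max_concurrent_connections=4)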
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'DynamicsAXSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py index 50ec75f79523..c925033d1240 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py @@ -29,7 +29,7 @@ class DynamicsLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py index 4971dabfba16..07c028ff2477 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py @@ -29,7 +29,7 @@ class DynamicsLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py index 7eb8be963583..45bac7b52064 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py @@ -37,6 +37,10 @@ class DynamicsSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. 
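# The hunk above adds max_concurrent_connections to DynamicsSink, and the hunks that
# follow add the same property to the other sinks and sources. A small sketch of how
# the new knob might be set, assuming DynamicsSink is imported from
# azure.mgmt.datafactory.models; write_behavior stays the server-filled constant "Upsert":
from azure.mgmt.datafactory.models import DynamicsSink

dynamics_sink = DynamicsSink(
    ignore_null_values=True,
    max_concurrent_connections=8)  # integer (or Expression with resultType integer)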
@@ -59,6 +63,7 @@ class DynamicsSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py index 2e2a64169797..5f736f9cf658 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py @@ -37,6 +37,10 @@ class DynamicsSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. @@ -59,6 +63,7 @@ class DynamicsSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -66,7 +71,7 @@ class DynamicsSink(CopySink): write_behavior = "Upsert" - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, ignore_null_values=None, **kwargs) -> None: - super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.ignore_null_values = ignore_null_values self.type = 'DynamicsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py index 09c04a8d09a6..d38f96fee911 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py @@ -27,6 +27,10 @@ class DynamicsSource(CopySource): with resultType string), pattern: 
((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: FetchXML is a proprietary query language that is used in @@ -43,6 +47,7 @@ class DynamicsSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py index 9c921cf40f3a..12d83625bc6a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py @@ -27,6 +27,10 @@ class DynamicsSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: FetchXML is a proprietary query language that is used in @@ -43,11 +47,12 @@ class DynamicsSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'DynamicsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py index ae14064ae523..6249c2e2334b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py @@ -29,7 +29,7 @@ class EloquaLinkedService(LinkedService): :type parameters: dict[str, 
~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py index 1c6bd97ecf9d..623d798036a3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py @@ -29,7 +29,7 @@ class EloquaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py index 694282ebcd8a..f016140189f1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py @@ -27,6 +27,10 @@ class EloquaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class EloquaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py index c9d96711743f..d200ff32fd9d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py @@ -27,6 +27,10 @@ class EloquaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class EloquaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'EloquaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py index c3c90f30935d..ffced5c2e689 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py @@ -29,7 +29,7 @@ class FileServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py index a9793d5b44fc..ec6fe58bb3a3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py @@ -29,7 +29,7 @@ class FileServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py index a851956ea319..6874f4c08929 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py @@ -49,6 +49,12 @@ class FileShareDataset(Dataset): :param file_name: The name of the on-premises file system. Type: string (or Expression with resultType string). :type file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). 
+ :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object :param format: The format of the files. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param file_filter: Specify a filter to be used to select a subset of @@ -76,6 +82,8 @@ class FileShareDataset(Dataset): 'type': {'key': 'type', 'type': 'str'}, 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, @@ -85,6 +93,8 @@ def __init__(self, **kwargs): super(FileShareDataset, self).__init__(**kwargs) self.folder_path = kwargs.get('folder_path', None) self.file_name = kwargs.get('file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.format = kwargs.get('format', None) self.file_filter = kwargs.get('file_filter', None) self.compression = kwargs.get('compression', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py index 675583ae2f2c..19e88a264e12 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py @@ -49,6 +49,12 @@ class FileShareDataset(Dataset): :param file_name: The name of the on-premises file system. Type: string (or Expression with resultType string). :type file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object :param format: The format of the files. 
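# Sketch of the new modifiedDatetimeStart/modifiedDatetimeEnd window on FileShareDataset,
# assuming the model is imported from azure.mgmt.datafactory.models; the linked service
# name 'OnPremFileServerLS' and the folder path are illustrative placeholders:
from azure.mgmt.datafactory.models import FileShareDataset, LinkedServiceReference

file_share_ds = FileShareDataset(
    linked_service_name=LinkedServiceReference(reference_name='OnPremFileServerLS'),
    folder_path='share/incoming',
    modified_datetime_start='2019-06-01T00:00:00Z',  # string (or Expression with resultType string)
    modified_datetime_end='2019-06-07T00:00:00Z')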
:type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param file_filter: Specify a filter to be used to select a subset of @@ -76,15 +82,19 @@ class FileShareDataset(Dataset): 'type': {'key': 'type', 'type': 'str'}, 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, file_filter=None, compression=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, file_filter=None, compression=None, **kwargs) -> None: super(FileShareDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.folder_path = folder_path self.file_name = file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end self.format = format self.file_filter = file_filter self.compression = compression diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py index 9f33cea7a261..75baab87456e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py @@ -34,6 +34,10 @@ class FileSystemSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. 
Possible @@ -52,6 +56,7 @@ class FileSystemSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py index a940e39878f8..92388128726e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py @@ -34,6 +34,10 @@ class FileSystemSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. Possible @@ -52,11 +56,12 @@ class FileSystemSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, copy_behavior=None, **kwargs) -> None: - super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.copy_behavior = copy_behavior self.type = 'FileSystemSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py index 1bbf97f1b31d..2986b1848153 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py @@ -27,6 +27,10 @@ class FileSystemSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read @@ -43,6 +47,7 @@ class FileSystemSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py index 6db0072329d4..0598490ca51c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py @@ -27,6 +27,10 @@ class FileSystemSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read @@ -43,11 +47,12 @@ class FileSystemSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, recursive=None, **kwargs) -> None: - super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: + super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.type = 'FileSystemSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py index 03a09f89c13e..e649ca56e37c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py @@ -29,7 +29,7 @@ class FtpServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. 
:type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py index 21fd1168165f..b38ad1c03f46 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py @@ -29,7 +29,7 @@ class FtpServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py new file mode 100644 index 000000000000..c460dd95c380 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py @@ -0,0 +1,119 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class GoogleAdWordsLinkedService(LinkedService): + """Google AdWords service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_customer_id: Required. The Client customer ID of the AdWords + account that you want to fetch report data for. + :type client_customer_id: object + :param developer_token: Required. The developer token associated with the + manager account that you use to grant access to the AdWords API. + :type developer_token: ~azure.mgmt.datafactory.models.SecretBase + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. 
Possible values include: 'ServiceAuthentication', + 'UserAuthentication' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType + :param refresh_token: The refresh token obtained from Google for + authorizing access to AdWords for UserAuthentication. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id of the google application used to acquire + the refresh token. + :type client_id: ~azure.mgmt.datafactory.models.SecretBase + :param client_secret: The client secret of the google application used to + acquire the refresh token. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param email: The service account email ID that is used for + ServiceAuthentication and can only be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to + authenticate the service account email address and can only be used on + self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_customer_id': {'required': True}, + 'developer_token': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, + 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleAdWordsLinkedService, self).__init__(**kwargs) + self.client_customer_id = kwargs.get('client_customer_id', None) + self.developer_token = kwargs.get('developer_token', None) + self.authentication_type = 
kwargs.get('authentication_type', None) + self.refresh_token = kwargs.get('refresh_token', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.email = kwargs.get('email', None) + self.key_file_path = kwargs.get('key_file_path', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'GoogleAdWords' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py new file mode 100644 index 000000000000..dfb3bc07e69f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py @@ -0,0 +1,119 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class GoogleAdWordsLinkedService(LinkedService): + """Google AdWords service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_customer_id: Required. The Client customer ID of the AdWords + account that you want to fetch report data for. + :type client_customer_id: object + :param developer_token: Required. The developer token associated with the + manager account that you use to grant access to the AdWords API. + :type developer_token: ~azure.mgmt.datafactory.models.SecretBase + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. Possible values include: 'ServiceAuthentication', + 'UserAuthentication' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType + :param refresh_token: The refresh token obtained from Google for + authorizing access to AdWords for UserAuthentication. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id of the google application used to acquire + the refresh token. + :type client_id: ~azure.mgmt.datafactory.models.SecretBase + :param client_secret: The client secret of the google application used to + acquire the refresh token. 
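# Sketch of the new GoogleAdWordsLinkedService using UserAuthentication, assuming it is
# exported from azure.mgmt.datafactory.models; every credential value below is an
# illustrative placeholder. ServiceAuthentication (email / key_file_path) would instead
# be used on a self-hosted integration runtime, per the docstring above.
from azure.mgmt.datafactory.models import GoogleAdWordsLinkedService, SecureString

adwords_ls = GoogleAdWordsLinkedService(
    client_customer_id='123-456-7890',
    developer_token=SecureString(value='<developer-token>'),
    authentication_type='UserAuthentication',
    refresh_token=SecureString(value='<refresh-token>'),
    client_id=SecureString(value='<oauth-client-id>'),
    client_secret=SecureString(value='<oauth-client-secret>'))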
+ :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param email: The service account email ID that is used for + ServiceAuthentication and can only be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to + authenticate the service account email address and can only be used on + self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_customer_id': {'required': True}, + 'developer_token': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, + 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, client_customer_id, developer_token, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None: + super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.client_customer_id = client_customer_id + self.developer_token = developer_token + self.authentication_type = authentication_type + self.refresh_token = refresh_token + self.client_id = client_id + self.client_secret = client_secret + self.email = email + self.key_file_path = key_file_path + 
self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.encrypted_credential = encrypted_credential + self.type = 'GoogleAdWords' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py new file mode 100644 index 000000000000..92b901b774ed --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class GoogleAdWordsObjectDataset(Dataset): + """Google AdWords service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleAdWordsObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'GoogleAdWordsObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py new file mode 100644 index 000000000000..e1272f978b8e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class GoogleAdWordsObjectDataset(Dataset): + """Google AdWords service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'GoogleAdWordsObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py new file mode 100644 index 000000000000..8699057abe09 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class GoogleAdWordsSource(CopySource): + """A copy activity Google AdWords service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleAdWordsSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'GoogleAdWordsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py new file mode 100644 index 000000000000..995d5324670b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class GoogleAdWordsSource(CopySource): + """A copy activity Google AdWords service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
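# --- Editor's usage sketch (not part of the generated patch) ---
# Hypothetical construction of the new GoogleAdWordsSource for use inside a copy
# activity; the AWQL query and retry values below are illustrative placeholders.
from azure.mgmt.datafactory.models import GoogleAdWordsSource

adwords_source = GoogleAdWordsSource(
    query='SELECT CampaignId, Impressions FROM CAMPAIGN_PERFORMANCE_REPORT',
    source_retry_count=3,
    source_retry_wait='00:00:30')  # timespan, pattern ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))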
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'GoogleAdWordsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py index c9fa8239b452..45a535b95d43 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py @@ -29,7 +29,7 @@ class GoogleBigQueryLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py index a8582aca98b5..146674a85531 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py @@ -29,7 +29,7 @@ class GoogleBigQueryLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py index c0598d88a6ed..3a28d2563a8b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py @@ -27,6 +27,10 @@ class GoogleBigQuerySource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class GoogleBigQuerySource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py index eb5727bd43a5..49364b4d0e3f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py @@ -27,6 +27,10 @@ class GoogleBigQuerySource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class GoogleBigQuerySource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'GoogleBigQuerySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py index d3de7ccab502..57913f779ca1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py @@ -29,7 +29,7 @@ class GreenplumLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
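# --- Editor's usage sketch (not part of the generated patch) ---
# The maxConcurrentConnections property added to the copy sources in this patch is
# set the same way on every source type; a hypothetical GoogleBigQuerySource, for
# example (the query string is a placeholder):
from azure.mgmt.datafactory.models import GoogleBigQuerySource

bigquery_source = GoogleBigQuerySource(
    query='SELECT * FROM mydataset.mytable',   # Type: string (or Expression)
    max_concurrent_connections=4)              # Type: integer (or Expression with resultType integer)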
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py index 886d38718ecd..bd707a5e85c9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py @@ -29,7 +29,7 @@ class GreenplumLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py index a463ff2c3482..086f12419f4a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py @@ -27,6 +27,10 @@ class GreenplumSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class GreenplumSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py index 6a373bf9d6ae..8b789deb43da 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py @@ -27,6 +27,10 @@ class GreenplumSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class GreenplumSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'GreenplumSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py index 4d7f3bf5ccb6..b6affd5caa0d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py @@ -29,7 +29,7 @@ class HBaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py index 7963b3fc643c..a8823e2e8937 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py @@ -29,7 +29,7 @@ class HBaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py index cc2c4fd1a843..eb6e3f1789bb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py @@ -27,6 +27,10 @@ class HBaseSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class HBaseSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py index c17d8cf07003..b2680e95c212 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py @@ -27,6 +27,10 @@ class HBaseSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class HBaseSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'HBaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py index b18a138a855e..810525342d82 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py @@ -29,7 +29,7 @@ class HDInsightLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str @@ -55,6 +55,10 @@ class HDInsightLinkedService(LinkedService): :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. :type is_esp_enabled: object + :param file_system: Specify the FileSystem if the main storage for the + HDInsight is ADLS Gen2. Type: string (or Expression with resultType + string). + :type file_system: object """ _validation = { @@ -76,6 +80,7 @@ class HDInsightLinkedService(LinkedService): 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, + 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, } def __init__(self, **kwargs): @@ -87,4 +92,5 @@ def __init__(self, **kwargs): self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.is_esp_enabled = kwargs.get('is_esp_enabled', None) + self.file_system = kwargs.get('file_system', None) self.type = 'HDInsight' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py index 769cf031a403..5c384f7d6288 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py @@ -29,7 +29,7 @@ class HDInsightLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -55,6 +55,10 @@ class HDInsightLinkedService(LinkedService): :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. :type is_esp_enabled: object + :param file_system: Specify the FileSystem if the main storage for the + HDInsight is ADLS Gen2. Type: string (or Expression with resultType + string). 
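# --- Editor's usage sketch (not part of the generated patch) ---
# Hypothetical use of the new fileSystem property: when a bring-your-own (BYOC)
# HDInsight cluster uses ADLS Gen2 as its primary storage, the linked service can
# point at an AzureBlobFS linked service and name the file system. The cluster URI,
# credentials and linked service names below are placeholders.
from azure.mgmt.datafactory.models import (
    HDInsightLinkedService, LinkedServiceReference, SecureString)

hdi_byoc_ls = HDInsightLinkedService(
    cluster_uri='https://mycluster.azurehdinsight.net',
    user_name='admin',
    password=SecureString(value='<cluster password>'),
    linked_service_name=LinkedServiceReference(
        reference_name='AdlsGen2LinkedService'),   # assumed AzureBlobFS linked service
    file_system='myfilesystem')                    # ADLS Gen2 file system name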
+ :type file_system: object """ _validation = { @@ -76,9 +80,10 @@ class HDInsightLinkedService(LinkedService): 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, + 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, } - def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, linked_service_name=None, hcatalog_linked_service_name=None, encrypted_credential=None, is_esp_enabled=None, **kwargs) -> None: + def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, linked_service_name=None, hcatalog_linked_service_name=None, encrypted_credential=None, is_esp_enabled=None, file_system=None, **kwargs) -> None: super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.cluster_uri = cluster_uri self.user_name = user_name @@ -87,4 +92,5 @@ def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, self.hcatalog_linked_service_name = hcatalog_linked_service_name self.encrypted_credential = encrypted_credential self.is_esp_enabled = is_esp_enabled + self.file_system = file_system self.type = 'HDInsight' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py index bd84aabc5012..d386aac9d9aa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py @@ -29,7 +29,7 @@ class HDInsightOnDemandLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -136,6 +136,14 @@ class HDInsightOnDemandLinkedService(LinkedService): cluster once it's up. Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] + :param virtual_network_id: The ARM resource ID for the vNet to which the + cluster should be joined after creation. Type: string (or Expression with + resultType string). + :type virtual_network_id: object + :param subnet_name: The ARM resource ID for the subnet in the vNet. If + virtualNetworkId was specified, then this property is required. Type: + string (or Expression with resultType string). 
+ :type subnet_name: object """ _validation = { @@ -187,6 +195,8 @@ class HDInsightOnDemandLinkedService(LinkedService): 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, + 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, + 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, } def __init__(self, **kwargs): @@ -222,4 +232,6 @@ def __init__(self, **kwargs): self.data_node_size = kwargs.get('data_node_size', None) self.zookeeper_node_size = kwargs.get('zookeeper_node_size', None) self.script_actions = kwargs.get('script_actions', None) + self.virtual_network_id = kwargs.get('virtual_network_id', None) + self.subnet_name = kwargs.get('subnet_name', None) self.type = 'HDInsightOnDemand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py index 5566a022bda2..178585c9b51d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py @@ -29,7 +29,7 @@ class HDInsightOnDemandLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -136,6 +136,14 @@ class HDInsightOnDemandLinkedService(LinkedService): cluster once it's up. Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] + :param virtual_network_id: The ARM resource ID for the vNet to which the + cluster should be joined after creation. Type: string (or Expression with + resultType string). + :type virtual_network_id: object + :param subnet_name: The ARM resource ID for the subnet in the vNet. If + virtualNetworkId was specified, then this property is required. Type: + string (or Expression with resultType string). 
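# --- Editor's usage sketch (not part of the generated patch) ---
# Hypothetical use of the new vNet properties on the on-demand HDInsight linked
# service. Only virtual_network_id and subnet_name are the point here; the other
# required arguments and all resource IDs are placeholder values.
from azure.mgmt.datafactory.models import (
    HDInsightOnDemandLinkedService, LinkedServiceReference)

hdi_on_demand_ls = HDInsightOnDemandLinkedService(
    cluster_size=4,
    time_to_live='00:15:00',
    version='3.6',
    linked_service_name=LinkedServiceReference(reference_name='StorageLinkedService'),
    host_subscription_id='<subscription id>',
    tenant='<tenant id>',
    cluster_resource_group='<resource group>',
    virtual_network_id='/subscriptions/<sub>/resourceGroups/<rg>/providers/'
                       'Microsoft.Network/virtualNetworks/<vnet>',
    subnet_name='/subscriptions/<sub>/resourceGroups/<rg>/providers/'
                'Microsoft.Network/virtualNetworks/<vnet>/subnets/<subnet>')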
+ :type subnet_name: object """ _validation = { @@ -187,9 +195,11 @@ class HDInsightOnDemandLinkedService(LinkedService): 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, + 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, + 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, } - def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None, script_actions=None, **kwargs) -> None: + def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None, script_actions=None, virtual_network_id=None, subnet_name=None, **kwargs) -> None: super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.cluster_size = cluster_size self.time_to_live = time_to_live @@ -222,4 +232,6 @@ def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, self.data_node_size = data_node_size self.zookeeper_node_size = zookeeper_node_size self.script_actions = script_actions + self.virtual_network_id = virtual_network_id + self.subnet_name = subnet_name self.type = 'HDInsightOnDemand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py index ab26ae10fe8c..b527f05a7e2f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py @@ -29,7 +29,7 @@ class HdfsLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that 
can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py index 3b854d945e27..e004701e1da0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py @@ -29,7 +29,7 @@ class HdfsLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py index 1322a0e68cea..be50590f6c32 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py @@ -27,6 +27,10 @@ class HdfsSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read @@ -45,6 +49,7 @@ class HdfsSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py index 34b194f92d64..3c60cab46289 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py @@ -27,6 +27,10 @@ class HdfsSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
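# --- Editor's usage sketch (not part of the generated patch) ---
# Hypothetical HdfsSource combining the new maxConcurrentConnections property with
# the existing recursive and DistCp options. The DistcpSettings field names are
# assumed from the existing model, and the endpoint/path values are placeholders.
from azure.mgmt.datafactory.models import HdfsSource, DistcpSettings

hdfs_source = HdfsSource(
    recursive=True,
    max_concurrent_connections=8,
    distcp_settings=DistcpSettings(
        resource_manager_endpoint='http://namenode:8088',   # placeholder YARN RM endpoint
        temp_script_path='/tmp/distcp-scripts'))             # placeholder staging folder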
:type type: str :param recursive: If true, files under the folder path will be read @@ -45,13 +49,14 @@ class HdfsSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, recursive=None, distcp_settings=None, **kwargs) -> None: - super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, distcp_settings=None, **kwargs) -> None: + super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.distcp_settings = distcp_settings self.type = 'HdfsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py index 57b40c30304a..c54c1393d56e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py @@ -29,7 +29,7 @@ class HiveLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py index 2f742d72594c..611d30ecb781 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py @@ -29,7 +29,7 @@ class HiveLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py index ad7cd5dc5a8a..3af88c3280e3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py @@ -27,6 +27,10 @@ class HiveSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class HiveSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py index 7dc54994b25a..6c09191b8c1b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py @@ -27,6 +27,10 @@ class HiveSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class HiveSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'HiveSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py index 86d6a072925e..6232bc45fee4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py @@ -29,7 +29,7 @@ class HttpLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. 
:type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py index bd4f03006513..7f70adb08425 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py @@ -29,7 +29,7 @@ class HttpLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py index 8c4a6ef6b8d7..ae131aa16c8c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py @@ -27,6 +27,10 @@ class HttpSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param http_request_timeout: Specifies the timeout for a HTTP client to @@ -45,6 +49,7 @@ class HttpSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py index 78bfe7216da6..df339fc3aef7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py @@ -27,6 +27,10 @@ class HttpSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
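# --- Editor's usage sketch (not part of the generated patch) ---
# Hypothetical HttpSource combining the existing httpRequestTimeout with the new
# maxConcurrentConnections property; the timeout value uses the timespan pattern
# noted in the docstring above.
from azure.mgmt.datafactory.models import HttpSource

http_source = HttpSource(
    http_request_timeout='00:05:00',   # timeout for the HTTP client to get a response
    max_concurrent_connections=2)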
:type type: str :param http_request_timeout: Specifies the timeout for a HTTP client to @@ -45,11 +49,12 @@ class HttpSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, http_request_timeout=None, **kwargs) -> None: - super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, http_request_timeout=None, **kwargs) -> None: + super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.http_request_timeout = http_request_timeout self.type = 'HttpSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py index 08af04633c12..3d0d6cb3a6f4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py @@ -29,7 +29,7 @@ class HubspotLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py index 93f66cd8e17b..272d613e9cd1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py @@ -29,7 +29,7 @@ class HubspotLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py index bca6b525860c..b4b4c618c33e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py @@ -27,6 +27,10 @@ class HubspotSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class HubspotSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py index cfc2d2d815b5..a29811342ce0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py @@ -27,6 +27,10 @@ class HubspotSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class HubspotSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'HubspotSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py index fdc471ea225f..a704852652db 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py @@ -29,7 +29,7 @@ class ImpalaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - 
Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py index 9d79f13b9708..55b2e0c861d7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py @@ -29,7 +29,7 @@ class ImpalaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py index dec8e843d0c6..9e27dbdb6266 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py @@ -27,6 +27,10 @@ class ImpalaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class ImpalaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py index 5bdb3391c2fc..f7dc4016d020 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py @@ -27,6 +27,10 @@ class ImpalaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class ImpalaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'ImpalaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py index d8b9a62fc878..517cdd63caa5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py @@ -29,7 +29,7 @@ class JiraLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py index 69606ee7cfcf..82dc8d578da3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py @@ -29,7 +29,7 @@ class JiraLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py index 7bb6a8649b8f..709da0ce1205 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py @@ -27,6 +27,10 @@ class JiraSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. 
Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class JiraSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py index 1a19ed99c55a..c958c8351bb3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py @@ -27,6 +27,10 @@ class JiraSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class JiraSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'JiraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py index 62f172fded76..3d4660d72e89 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py @@ -18,34 +18,38 @@ class LinkedService(Model): resource. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AzureFunctionLinkedService, ResponsysLinkedService, - AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, - HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, - NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, - XeroLinkedService, SquareLinkedService, SparkLinkedService, - ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, - PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, - MarketoLinkedService, MariaDBLinkedService, MagentoLinkedService, - JiraLinkedService, ImpalaLinkedService, HubspotLinkedService, - HiveLinkedService, HBaseLinkedService, GreenplumLinkedService, - GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService, - CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService, - AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService, - SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService, - AzureSearchLinkedService, CustomDataSourceLinkedService, - AmazonRedshiftLinkedService, AmazonS3LinkedService, SapEccLinkedService, - SapCloudForCustomerLinkedService, SalesforceLinkedService, - AzureDataLakeStoreLinkedService, MongoDbLinkedService, - CassandraLinkedService, WebLinkedService, ODataLinkedService, - HdfsLinkedService, OdbcLinkedService, AzureMLLinkedService, - TeradataLinkedService, Db2LinkedService, SybaseLinkedService, - PostgreSqlLinkedService, MySqlLinkedService, AzureMySqlLinkedService, - OracleLinkedService, FileServerLinkedService, HDInsightLinkedService, - DynamicsLinkedService, CosmosDbLinkedService, AzureKeyVaultLinkedService, - AzureBatchLinkedService, AzureSqlDatabaseLinkedService, - SqlServerLinkedService, AzureSqlDWLinkedService, - AzureTableStorageLinkedService, AzureBlobStorageLinkedService, - AzureStorageLinkedService + sub-classes are: AzureFunctionLinkedService, + AzureDataExplorerLinkedService, GoogleAdWordsLinkedService, + OracleServiceCloudLinkedService, DynamicsAXLinkedService, + ResponsysLinkedService, AzureDatabricksLinkedService, + AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService, + SalesforceMarketingCloudLinkedService, NetezzaLinkedService, + VerticaLinkedService, ZohoLinkedService, XeroLinkedService, + SquareLinkedService, SparkLinkedService, ShopifyLinkedService, + ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, + PhoenixLinkedService, PaypalLinkedService, MarketoLinkedService, + MariaDBLinkedService, MagentoLinkedService, JiraLinkedService, + ImpalaLinkedService, HubspotLinkedService, HiveLinkedService, + HBaseLinkedService, GreenplumLinkedService, GoogleBigQueryLinkedService, + EloquaLinkedService, DrillLinkedService, CouchbaseLinkedService, + ConcurLinkedService, AzurePostgreSqlLinkedService, AmazonMWSLinkedService, + SapHanaLinkedService, SapBWLinkedService, SftpServerLinkedService, + FtpServerLinkedService, HttpLinkedService, AzureSearchLinkedService, + CustomDataSourceLinkedService, AmazonRedshiftLinkedService, + AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, + SapEccLinkedService, SapCloudForCustomerLinkedService, + SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, + AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, + MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, + WebLinkedService, ODataLinkedService, HdfsLinkedService, OdbcLinkedService, + AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, + SybaseLinkedService, PostgreSqlLinkedService, 
MySqlLinkedService, + AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, + HDInsightLinkedService, DynamicsLinkedService, CosmosDbLinkedService, + AzureKeyVaultLinkedService, AzureBatchLinkedService, + AzureSqlDatabaseLinkedService, SqlServerLinkedService, + AzureSqlDWLinkedService, AzureTableStorageLinkedService, + AzureBlobStorageLinkedService, AzureStorageLinkedService All required parameters must be populated in order to send to Azure. @@ -61,7 +65,7 @@ class LinkedService(Model): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -81,7 +85,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 
'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, **kwargs): diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py index ff4bb8c7605d..eadf4030e132 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py @@ -18,34 +18,38 @@ class LinkedService(Model): resource. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureFunctionLinkedService, ResponsysLinkedService, - AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, - HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, - NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, - XeroLinkedService, SquareLinkedService, SparkLinkedService, - ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, - PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, - MarketoLinkedService, MariaDBLinkedService, MagentoLinkedService, - JiraLinkedService, ImpalaLinkedService, HubspotLinkedService, - HiveLinkedService, HBaseLinkedService, GreenplumLinkedService, - GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService, - CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService, - AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService, - SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService, - AzureSearchLinkedService, CustomDataSourceLinkedService, - AmazonRedshiftLinkedService, AmazonS3LinkedService, SapEccLinkedService, - SapCloudForCustomerLinkedService, SalesforceLinkedService, - AzureDataLakeStoreLinkedService, MongoDbLinkedService, - CassandraLinkedService, WebLinkedService, ODataLinkedService, - HdfsLinkedService, OdbcLinkedService, AzureMLLinkedService, - TeradataLinkedService, Db2LinkedService, SybaseLinkedService, - PostgreSqlLinkedService, MySqlLinkedService, AzureMySqlLinkedService, - OracleLinkedService, FileServerLinkedService, HDInsightLinkedService, - DynamicsLinkedService, CosmosDbLinkedService, AzureKeyVaultLinkedService, - AzureBatchLinkedService, AzureSqlDatabaseLinkedService, - SqlServerLinkedService, AzureSqlDWLinkedService, - AzureTableStorageLinkedService, AzureBlobStorageLinkedService, - AzureStorageLinkedService + sub-classes are: AzureFunctionLinkedService, + AzureDataExplorerLinkedService, GoogleAdWordsLinkedService, + OracleServiceCloudLinkedService, DynamicsAXLinkedService, + ResponsysLinkedService, AzureDatabricksLinkedService, + AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService, + SalesforceMarketingCloudLinkedService, NetezzaLinkedService, + VerticaLinkedService, ZohoLinkedService, XeroLinkedService, + SquareLinkedService, SparkLinkedService, ShopifyLinkedService, + ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, + PhoenixLinkedService, PaypalLinkedService, MarketoLinkedService, + MariaDBLinkedService, MagentoLinkedService, JiraLinkedService, + ImpalaLinkedService, HubspotLinkedService, HiveLinkedService, + HBaseLinkedService, GreenplumLinkedService, GoogleBigQueryLinkedService, + EloquaLinkedService, DrillLinkedService, CouchbaseLinkedService, + ConcurLinkedService, AzurePostgreSqlLinkedService, AmazonMWSLinkedService, + SapHanaLinkedService, SapBWLinkedService, SftpServerLinkedService, + FtpServerLinkedService, HttpLinkedService, AzureSearchLinkedService, + CustomDataSourceLinkedService, 
AmazonRedshiftLinkedService, + AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, + SapEccLinkedService, SapCloudForCustomerLinkedService, + SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, + AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, + MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, + WebLinkedService, ODataLinkedService, HdfsLinkedService, OdbcLinkedService, + AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, + SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, + AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, + HDInsightLinkedService, DynamicsLinkedService, CosmosDbLinkedService, + AzureKeyVaultLinkedService, AzureBatchLinkedService, + AzureSqlDatabaseLinkedService, SqlServerLinkedService, + AzureSqlDWLinkedService, AzureTableStorageLinkedService, + AzureBlobStorageLinkedService, AzureStorageLinkedService All required parameters must be populated in order to send to Azure. @@ -61,7 +65,7 @@ class LinkedService(Model): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -81,7 +85,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 
'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 
'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py index 5fb8974f28db..9d65437b5daa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py @@ -29,7 +29,7 @@ class MagentoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py index 420656103983..74de1573118b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py @@ -29,7 +29,7 @@ class MagentoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py index 679ba2a0669e..df49fe63a544 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py @@ -27,6 +27,10 @@ class MagentoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,6 +46,7 @@ class MagentoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py index a01cf80a969a..15efcc12a054 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py @@ -27,6 +27,10 @@ class MagentoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class MagentoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'MagentoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py index 0a98a04138dc..3bbe048d4877 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py @@ -29,7 +29,7 @@ class MariaDBLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py index ef1114660ad7..475284d56038 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py @@ -29,7 +29,7 @@ class MariaDBLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py index 96b7116cd3ac..a744c1c5ff8f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py @@ -27,6 +27,10 @@ class MariaDBSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class MariaDBSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py index 1dbb6f327d04..472877b8f0bb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py @@ -27,6 +27,10 @@ class MariaDBSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class MariaDBSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'MariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py index 432676824a75..2a9e76446122 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py @@ -29,7 +29,7 @@ class MarketoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py index b4e360931809..dc326f24acd5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py @@ -29,7 +29,7 @@ class MarketoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py index 4867951baae7..6d2061ef0dee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py @@ -27,6 +27,10 @@ class MarketoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class MarketoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py index 52c16eae0437..573dc0439754 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py @@ -27,6 +27,10 @@ class MarketoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class MarketoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'MarketoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py new file mode 100644 index 000000000000..a2d2127d1397 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
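A minimal usage sketch for the new optional max_concurrent_connections keyword that the hunks above add to the generated copy-source models (HubspotSource, ImpalaSource, JiraSource, MagentoSource, MariaDBSource, MarketoSource, MongoDbSource). It assumes these models are importable from azure.mgmt.datafactory.models, as the package __init__ update in this patch suggests; the query text is a placeholder.

    from azure.mgmt.datafactory.models import MarketoSource

    # Cap the copy activity at four concurrent connections against the source
    # data store; an ADF Expression object is also accepted here.
    source = MarketoSource(
        query="Activities",          # placeholder query
        max_concurrent_connections=4,
    )
    # The keyword is forwarded to the CopySource base class and serializes as
    # 'maxConcurrentConnections' next to sourceRetryCount/sourceRetryWait,
    # per the _attribute_map entries added in this patch.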
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class MongoDbCursorMethodsProperties(Model): + """Cursor methods for Mongodb query. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param project: Specifies the fields to return in the documents that match + the query filter. To return all fields in the matching documents, omit + this parameter. Type: string (or Expression with resultType string). + :type project: object + :param sort: Specifies the order in which the query returns matching + documents. Type: string (or Expression with resultType string). Type: + string (or Expression with resultType string). + :type sort: object + :param skip: Specifies the how many documents skipped and where MongoDB + begins returning results. This approach may be useful in implementing + paginated results. Type: integer (or Expression with resultType integer). + :type skip: object + :param limit: Specifies the maximum number of documents the server + returns. limit() is analogous to the LIMIT statement in a SQL database. + Type: integer (or Expression with resultType integer). + :type limit: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'project': {'key': 'project', 'type': 'object'}, + 'sort': {'key': 'sort', 'type': 'object'}, + 'skip': {'key': 'skip', 'type': 'object'}, + 'limit': {'key': 'limit', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.project = kwargs.get('project', None) + self.sort = kwargs.get('sort', None) + self.skip = kwargs.get('skip', None) + self.limit = kwargs.get('limit', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py new file mode 100644 index 000000000000..e1e3f50d1539 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class MongoDbCursorMethodsProperties(Model): + """Cursor methods for Mongodb query. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param project: Specifies the fields to return in the documents that match + the query filter. To return all fields in the matching documents, omit + this parameter. Type: string (or Expression with resultType string). + :type project: object + :param sort: Specifies the order in which the query returns matching + documents. Type: string (or Expression with resultType string). Type: + string (or Expression with resultType string). 
+ :type sort: object + :param skip: Specifies the how many documents skipped and where MongoDB + begins returning results. This approach may be useful in implementing + paginated results. Type: integer (or Expression with resultType integer). + :type skip: object + :param limit: Specifies the maximum number of documents the server + returns. limit() is analogous to the LIMIT statement in a SQL database. + Type: integer (or Expression with resultType integer). + :type limit: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'project': {'key': 'project', 'type': 'object'}, + 'sort': {'key': 'sort', 'type': 'object'}, + 'skip': {'key': 'skip', 'type': 'object'}, + 'limit': {'key': 'limit', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, project=None, sort=None, skip=None, limit=None, **kwargs) -> None: + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.project = project + self.sort = sort + self.skip = skip + self.limit = limit diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py index 49d53510f7fd..76d162b0ff70 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py @@ -29,7 +29,7 @@ class MongoDbLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py index c1d96a5465b9..95308b6ea8f0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py @@ -29,7 +29,7 @@ class MongoDbLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py index b9f0be6b97d3..3da4b931f5e5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py @@ -27,6 +27,10 @@ class MongoDbSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str :param query: Database query. Should be a SQL-92 query expression. Type: @@ -42,6 +46,7 @@ class MongoDbSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py index b4f01d8d7ffb..ab3e5b6e0cc9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py @@ -27,6 +27,10 @@ class MongoDbSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Should be a SQL-92 query expression. Type: @@ -42,11 +46,12 @@ class MongoDbSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'MongoDbSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py new file mode 100644 index 000000000000..17089373d4c5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class MongoDbV2CollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the MongoDB database. + Type: string (or Expression with resultType string). + :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbV2CollectionDataset, self).__init__(**kwargs) + self.collection = kwargs.get('collection', None) + self.type = 'MongoDbV2Collection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py new file mode 100644 index 000000000000..ad1e5c538645 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
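A hedged construction sketch for the new MongoDbV2CollectionDataset model defined above; both linked_service_name and collection are required by its _validation map. It assumes LinkedServiceReference is constructed with a reference_name pointing at an existing MongoDB v2 linked service, and "orders" is a hypothetical collection name.

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        MongoDbV2CollectionDataset,
    )

    dataset = MongoDbV2CollectionDataset(
        linked_service_name=LinkedServiceReference(reference_name="MongoDbV2Ls1"),
        collection="orders",
    )
    # Serializes with type 'MongoDbV2Collection', with the collection stored
    # under typeProperties.collection as shown in the _attribute_map above.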
+# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MongoDbV2CollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the MongoDB database. + Type: string (or Expression with resultType string). + :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.collection = collection + self.type = 'MongoDbV2Collection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py new file mode 100644 index 000000000000..bb29fc767420 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
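For context, a brief sketch of constructing the new MongoDbV2CollectionDataset from the class above; the linked service reference name is a placeholder, and LinkedServiceReference is the existing reference type assumed from this package:

    # Hypothetical construction of the new MongoDB v2 collection dataset.
    from azure.mgmt.datafactory.models import LinkedServiceReference, MongoDbV2CollectionDataset

    mongo_dataset = MongoDbV2CollectionDataset(
        linked_service_name=LinkedServiceReference(reference_name="MongoDbV2Ls"),  # placeholder reference
        collection="orders",                                                       # required collection name
    )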
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class MongoDbV2LinkedService(LinkedService): + """Linked service for MongoDB data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The MongoDB connection string. Type: + string, SecureString or AzureKeyVaultSecretReference. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the MongoDB database that you want + to access. Type: string (or Expression with resultType string). + :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbV2LinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.database = kwargs.get('database', None) + self.type = 'MongoDbV2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py new file mode 100644 index 000000000000..d1388ce797a5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MongoDbV2LinkedService(LinkedService): + """Linked service for MongoDB data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The MongoDB connection string. Type: + string, SecureString or AzureKeyVaultSecretReference. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the MongoDB database that you want + to access. Type: string (or Expression with resultType string). + :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.database = database + self.type = 'MongoDbV2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py new file mode 100644 index 000000000000..295b74228b9a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class MongoDbV2Source(CopySource): + """A copy activity source for a MongoDB database. 
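A short, hedged example of the new MongoDbV2LinkedService based on the keyword-only constructor above; the connection string and database name are placeholders, and a SecureString or Key Vault reference could be supplied for the connection string instead of a raw value:

    # Hypothetical construction of the new MongoDB v2 linked service.
    from azure.mgmt.datafactory.models import MongoDbV2LinkedService

    mongo_ls = MongoDbV2LinkedService(
        connection_string="mongodb://myserver:27017",  # placeholder; SecureString/AKV reference also accepted
        database="mydb",                               # required database name
    )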
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param filter: Specifies selection filter using query operators. To return + all documents in a collection, omit this parameter or pass an empty + document ({}). Type: string (or Expression with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for MongoDB query. + :type cursor_methods: + ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each + batch of the response from the MongoDB instance. In most cases, modifying + the batch size will not affect the user or the application. This property's + main purpose is to avoid hitting the limitation of response size. Type: + integer (or Expression with resultType integer). + :type batch_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbV2Source, self).__init__(**kwargs) + self.filter = kwargs.get('filter', None) + self.cursor_methods = kwargs.get('cursor_methods', None) + self.batch_size = kwargs.get('batch_size', None) + self.type = 'MongoDbV2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py new file mode 100644 index 000000000000..872b060a49bb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MongoDbV2Source(CopySource): + """A copy activity source for a MongoDB database.
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param filter: Specifies selection filter using query operators. To return + all documents in a collection, omit this parameter or pass an empty + document ({}). Type: string (or Expression with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for MongoDB query. + :type cursor_methods: + ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each + batch of the response from the MongoDB instance. In most cases, modifying + the batch size will not affect the user or the application. This property's + main purpose is to avoid hitting the limitation of response size. Type: + integer (or Expression with resultType integer). + :type batch_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None: + super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size + self.type = 'MongoDbV2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py index dd279ab6baa3..1be28aa1b6ab 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py @@ -34,6 +34,9 @@ class MultiplePipelineTrigger(Trigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required.
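To illustrate the new MongoDB v2 source type, a minimal sketch built from the keyword-only constructor above; the filter document and batch size are placeholders:

    # Hypothetical MongoDbV2Source configured with a filter and batch size.
    from azure.mgmt.datafactory.models import MongoDbV2Source

    mongo_v2_source = MongoDbV2Source(
        filter='{"status": "active"}',    # query-operator filter; omit or pass {} to return all documents
        batch_size=100,                   # documents returned per response batch
        max_concurrent_connections=2,     # new connection cap on the source store
    )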
Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -50,6 +53,7 @@ class MultiplePipelineTrigger(Trigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py index 3400431e49e2..206ab74ef419 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py @@ -34,6 +34,9 @@ class MultiplePipelineTrigger(Trigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -50,6 +53,7 @@ class MultiplePipelineTrigger(Trigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, } @@ -58,7 +62,7 @@ class MultiplePipelineTrigger(Trigger): 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} } - def __init__(self, *, additional_properties=None, description: str=None, pipelines=None, **kwargs) -> None: - super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + def __init__(self, *, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.pipelines = pipelines self.type = 'MultiplePipelineTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py index 542fb13b7a37..ec85b0136714 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py @@ -29,7 +29,7 @@ class MySqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
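The annotations parameter added to MultiplePipelineTrigger is inherited by BlobEventsTrigger, BlobTrigger and ScheduleTrigger; a minimal, hedged sketch of passing it, with placeholder tag values:

    # Hypothetical use of the new annotations parameter on a multiple-pipeline trigger.
    from azure.mgmt.datafactory.models import MultiplePipelineTrigger

    trigger = MultiplePipelineTrigger(
        description="Trigger carrying free-form tags",
        annotations=["adls-gen2-rollout", "owner:data-team"],  # new: descriptive tags on the trigger
    )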
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py index cd87d5e7e3b5..b8038df22fd6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py @@ -29,7 +29,7 @@ class MySqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py index 319a68efddc5..5d94bdecaf62 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py @@ -29,7 +29,7 @@ class NetezzaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py index 6c3b607d60dc..2fcc288fd5b7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py @@ -29,7 +29,7 @@ class NetezzaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py index 0c08b1440614..caf73f9ef81d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py @@ -27,6 +27,10 @@ class NetezzaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,6 +46,7 @@ class NetezzaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py index 2b4c38f708ee..101a1f26a74d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py @@ -27,6 +27,10 @@ class NetezzaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class NetezzaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'NetezzaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py index 9a7edca9ddb1..01db8d71e924 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py @@ -29,7 +29,7 @@ class ODataLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -37,7 +37,8 @@ class ODataLinkedService(LinkedService): (or Expression with resultType string). :type url: object :param authentication_type: Type of authentication used to connect to the - OData service. 
Possible values include: 'Basic', 'Anonymous' + OData service. Possible values include: 'Basic', 'Anonymous', 'Windows', + 'AadServicePrincipal', 'ManagedServiceIdentity' :type authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType :param user_name: User name of the OData service. Type: string (or @@ -45,6 +46,38 @@ class ODataLinkedService(LinkedService): :type user_name: object :param password: Password of the OData service. :type password: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: Specify the tenant information (domain name or tenant ID) + under which your application resides. Type: string (or Expression with + resultType string). + :type tenant: object + :param service_principal_id: Specify the application id of your + application registered in Azure Active Directory. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param aad_resource_id: Specify the resource you are requesting + authorization to use Directory. Type: string (or Expression with + resultType string). + :type aad_resource_id: object + :param aad_service_principal_credential_type: Specify the credential type + (key or cert) is used for service principal. Possible values include: + 'ServicePrincipalKey', 'ServicePrincipalCert' + :type aad_service_principal_credential_type: str or + ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType + :param service_principal_key: Specify the secret of your application + registered in Azure Active Directory. Type: string (or Expression with + resultType string). + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_embedded_cert: Specify the base64 encoded + certificate of your application registered in Azure Active Directory. + Type: string (or Expression with resultType string). + :type service_principal_embedded_cert: + ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_embedded_cert_password: Specify the password of + your certificate if your certificate has a password and you are using + AadServicePrincipal authentication. Type: string (or Expression with + resultType string). + :type service_principal_embedded_cert_password: + ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -67,6 +100,13 @@ class ODataLinkedService(LinkedService): 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, + 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -76,5 +116,12 @@ def __init__(self, **kwargs): self.authentication_type = kwargs.get('authentication_type', None) self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) + self.tenant = kwargs.get('tenant', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.aad_service_principal_credential_type = kwargs.get('aad_service_principal_credential_type', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.service_principal_embedded_cert = kwargs.get('service_principal_embedded_cert', None) + self.service_principal_embedded_cert_password = kwargs.get('service_principal_embedded_cert_password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'OData' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py index 688bb4e4ffda..fcf2d8bb9819 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py @@ -29,7 +29,7 @@ class ODataLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -37,7 +37,8 @@ class ODataLinkedService(LinkedService): (or Expression with resultType string). :type url: object :param authentication_type: Type of authentication used to connect to the - OData service. Possible values include: 'Basic', 'Anonymous' + OData service. Possible values include: 'Basic', 'Anonymous', 'Windows', + 'AadServicePrincipal', 'ManagedServiceIdentity' :type authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType :param user_name: User name of the OData service. Type: string (or @@ -45,6 +46,38 @@ class ODataLinkedService(LinkedService): :type user_name: object :param password: Password of the OData service. 
:type password: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: Specify the tenant information (domain name or tenant ID) + under which your application resides. Type: string (or Expression with + resultType string). + :type tenant: object + :param service_principal_id: Specify the application id of your + application registered in Azure Active Directory. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param aad_resource_id: Specify the resource you are requesting + authorization to use Directory. Type: string (or Expression with + resultType string). + :type aad_resource_id: object + :param aad_service_principal_credential_type: Specify the credential type + (key or cert) is used for service principal. Possible values include: + 'ServicePrincipalKey', 'ServicePrincipalCert' + :type aad_service_principal_credential_type: str or + ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType + :param service_principal_key: Specify the secret of your application + registered in Azure Active Directory. Type: string (or Expression with + resultType string). + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_embedded_cert: Specify the base64 encoded + certificate of your application registered in Azure Active Directory. + Type: string (or Expression with resultType string). + :type service_principal_embedded_cert: + ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_embedded_cert_password: Specify the password of + your certificate if your certificate has a password and you are using + AadServicePrincipal authentication. Type: string (or Expression with + resultType string). + :type service_principal_embedded_cert_password: + ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -67,14 +100,28 @@ class ODataLinkedService(LinkedService): 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, + 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, tenant=None, service_principal_id=None, aad_resource_id=None, aad_service_principal_credential_type=None, service_principal_key=None, service_principal_embedded_cert=None, service_principal_embedded_cert_password=None, encrypted_credential=None, **kwargs) -> None: super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.url = url self.authentication_type = authentication_type self.user_name = user_name self.password = password + self.tenant = tenant + self.service_principal_id = service_principal_id + self.aad_resource_id = aad_resource_id + self.aad_service_principal_credential_type = aad_service_principal_credential_type + self.service_principal_key = service_principal_key + self.service_principal_embedded_cert = service_principal_embedded_cert + self.service_principal_embedded_cert_password = service_principal_embedded_cert_password self.encrypted_credential = encrypted_credential self.type = 'OData' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py index 43559b76e0e0..53d21dee2def 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py @@ -29,7 +29,7 @@ class OdbcLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
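A hedged sketch of the expanded OData authentication surface using the service-principal-key path shown above; the URL, tenant, IDs and secret are placeholders, and SecureString is the existing secret type assumed from this package:

    # Hypothetical ODataLinkedService using the new AadServicePrincipal authentication type.
    from azure.mgmt.datafactory.models import ODataLinkedService, SecureString

    odata_ls = ODataLinkedService(
        url="https://contoso.example/odata.svc",                       # placeholder service URL
        authentication_type="AadServicePrincipal",                     # new enum value
        tenant="contoso.onmicrosoft.com",                              # placeholder tenant
        service_principal_id="00000000-0000-0000-0000-000000000000",   # placeholder application id
        aad_service_principal_credential_type="ServicePrincipalKey",   # key-based credential
        service_principal_key=SecureString(value="<app-secret>"),      # placeholder secret
        aad_resource_id="https://contoso.example/resource",            # placeholder resource id
    )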
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py index e0147881f3d0..2e376d23c67a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py @@ -29,7 +29,7 @@ class OdbcLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py index 4598952cb21b..ced7e1dbd9e4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py @@ -34,6 +34,10 @@ class OdbcSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: @@ -51,6 +55,7 @@ class OdbcSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py index 430329bdf2b9..9a181f8df7e9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py @@ -34,6 +34,10 @@ class OdbcSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. 
Type: @@ -51,11 +55,12 @@ class OdbcSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, pre_copy_script=None, **kwargs) -> None: - super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'OdbcSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py new file mode 100644 index 000000000000..baa90666d669 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class Office365Dataset(Dataset): + """The Office365 account. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. 
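For reference, a minimal sketch of the sink-side counterpart of the new connection cap, based on the OdbcSink constructor above; the script and sizes are placeholders:

    # Hypothetical OdbcSink with the new max_concurrent_connections keyword.
    from azure.mgmt.datafactory.models import OdbcSink

    odbc_sink = OdbcSink(
        pre_copy_script="TRUNCATE TABLE staging_orders",  # placeholder pre-copy script
        write_batch_size=1000,
        max_concurrent_connections=8,                     # new: caps concurrent connections to the sink store
    )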
If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. Name of the dataset to extract from Office + 365. Type: string (or Expression with resultType string). + :type table_name: object + :param predicate: A predicate expression that can be used to filter the + specific rows to extract from Office 365. Type: string (or Expression with + resultType string). + :type predicate: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Office365Dataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.predicate = kwargs.get('predicate', None) + self.type = 'Office365Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py new file mode 100644 index 000000000000..5517f7daf9e3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class Office365Dataset(Dataset): + """The Office365 account. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. Name of the dataset to extract from Office + 365. Type: string (or Expression with resultType string). + :type table_name: object + :param predicate: A predicate expression that can be used to filter the + specific rows to extract from Office 365. Type: string (or Expression with + resultType string). + :type predicate: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, predicate=None, **kwargs) -> None: + super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.predicate = predicate + self.type = 'Office365Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py new file mode 100644 index 000000000000..2dc98897482a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class Office365LinkedService(LinkedService): + """Office365 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
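A brief, hedged sketch of the new Office365Dataset; the table name and predicate are illustrative placeholders (real dataset names and predicate syntax come from the Office 365 service), and LinkedServiceReference is assumed from the existing models:

    # Hypothetical Office365Dataset pointing at a placeholder Office 365 table.
    from azure.mgmt.datafactory.models import LinkedServiceReference, Office365Dataset

    o365_dataset = Office365Dataset(
        linked_service_name=LinkedServiceReference(reference_name="Office365Ls"),  # placeholder reference
        table_name="BasicDataSet_v0.Message_v0",               # placeholder dataset name to extract
        predicate="CreatedDateTime >= 2019-01-01T00:00:00Z",   # optional placeholder row filter
    )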
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param office365_tenant_id: Required. Azure tenant ID to which the Office + 365 account belongs. Type: string (or Expression with resultType string). + :type office365_tenant_id: object + :param service_principal_tenant_id: Required. Specify the tenant + information under which your Azure AD web application resides. Type: + string (or Expression with resultType string). + :type service_principal_tenant_id: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'office365_tenant_id': {'required': True}, + 'service_principal_tenant_id': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, + 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Office365LinkedService, self).__init__(**kwargs) + self.office365_tenant_id = kwargs.get('office365_tenant_id', None) + self.service_principal_tenant_id = kwargs.get('service_principal_tenant_id', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Office365' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py new file mode 100644 index 000000000000..5a69c0d895fa --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class Office365LinkedService(LinkedService): + """Office365 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param office365_tenant_id: Required. Azure tenant ID to which the Office + 365 account belongs. Type: string (or Expression with resultType string). + :type office365_tenant_id: object + :param service_principal_tenant_id: Required. Specify the tenant + information under which your Azure AD web application resides. Type: + string (or Expression with resultType string). + :type service_principal_tenant_id: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'office365_tenant_id': {'required': True}, + 'service_principal_tenant_id': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, + 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, office365_tenant_id, service_principal_tenant_id, service_principal_id, service_principal_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: + super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.office365_tenant_id = office365_tenant_id + self.service_principal_tenant_id = service_principal_tenant_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.encrypted_credential = encrypted_credential + self.type = 'Office365' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py new file mode 100644 index 000000000000..8dff7a01ccaa --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class Office365Source(CopySource): + """A copy activity source for an Office365 service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
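A minimal usage sketch of the new Office365LinkedService model defined above. The tenant, client ID, and key values are placeholders, and SecureString is used only as one SecretBase option; an AzureKeyVaultSecretReference would serve equally well.

from azure.mgmt.datafactory.models import Office365LinkedService, SecureString

# Placeholder identifiers; replace with real tenant/application values.
office365_ls = Office365LinkedService(
    office365_tenant_id='<office-365-tenant-id>',
    service_principal_tenant_id='<aad-tenant-id>',
    service_principal_id='<application-client-id>',
    service_principal_key=SecureString(value='<application-key>'),
)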
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(Office365Source, self).__init__(**kwargs) + self.type = 'Office365Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py new file mode 100644 index 000000000000..25ae6340ae01 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class Office365Source(CopySource): + """A copy activity source for an Office365 service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'Office365Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py index 5485151adb1f..19f715dfd9e2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py @@ -29,7 +29,7 @@ class OracleLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py index 80b0ed1176ff..a46f0463afb5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py @@ -29,7 +29,7 @@ class OracleLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py new file mode 100644 index 000000000000..44ce000868b7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py @@ -0,0 +1,95 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class OracleServiceCloudLinkedService(LinkedService): + """Oracle Service Cloud linked service. 
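A sketch of the new Office365Source used as the source of a copy activity. The activity name, the dataset reference names, and the pairing with AzureBlobFSSink are illustrative assumptions rather than anything this patch prescribes.

from azure.mgmt.datafactory.models import (
    AzureBlobFSSink,
    CopyActivity,
    DatasetReference,
    Office365Source,
)

# Dataset reference names below are assumed to already exist in the factory.
copy_activity = CopyActivity(
    name='CopyFromOffice365',
    inputs=[DatasetReference(reference_name='Office365InputDataset')],
    outputs=[DatasetReference(reference_name='AdlsGen2OutputDataset')],
    source=Office365Source(),
    sink=AzureBlobFSSink(),
)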
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the Oracle Service Cloud instance. + :type host: object + :param username: Required. The user name that you use to access Oracle + Service Cloud server. + :type username: object + :param password: Required. The password corresponding to the user name + that you provided in the username key. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleServiceCloudLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'OracleServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py new file mode 100644 index 000000000000..8732e2e82ca0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py @@ -0,0 +1,95 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class OracleServiceCloudLinkedService(LinkedService): + """Oracle Service Cloud linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. 
Constant filled by server. + :type type: str + :param host: Required. The URL of the Oracle Service Cloud instance. + :type host: object + :param username: Required. The user name that you use to access Oracle + Service Cloud server. + :type username: object + :param password: Required. The password corresponding to the user name + that you provided in the username key. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, username, password, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'OracleServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py new file mode 100644 index 000000000000..35ce3439d8a0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class OracleServiceCloudObjectDataset(Dataset): + """Oracle Service Cloud dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleServiceCloudObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'OracleServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py new file mode 100644 index 000000000000..a478e1abc828 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class OracleServiceCloudObjectDataset(Dataset): + """Oracle Service Cloud dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'OracleServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py new file mode 100644 index 000000000000..f42291941393 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class OracleServiceCloudSource(CopySource): + """A copy activity Oracle Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
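A minimal sketch combining the new OracleServiceCloudLinkedService and OracleServiceCloudObjectDataset models shown above; the host, credentials, reference name, and table name are placeholders.

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    OracleServiceCloudLinkedService,
    OracleServiceCloudObjectDataset,
    SecureString,
)

# Placeholder connection details for the Oracle Service Cloud instance.
osc_linked_service = OracleServiceCloudLinkedService(
    host='https://<instance>.custhelp.com',
    username='<user-name>',
    password=SecureString(value='<password>'),
)

# Dataset pointing at an object in that instance; 'accounts' is illustrative.
osc_dataset = OracleServiceCloudObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='OracleServiceCloudLS'),
    table_name='accounts',
)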
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleServiceCloudSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'OracleServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py new file mode 100644 index 000000000000..1fa5d6eb3748 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class OracleServiceCloudSource(CopySource): + """A copy activity Oracle Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'OracleServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py index fa0e11f57553..1f6c747c49db 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py @@ -34,6 +34,10 @@ class OracleSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. Type: string (or Expression @@ -51,6 +55,7 @@ class OracleSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py index a6b666d31ed7..3a571c66732a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py @@ -34,6 +34,10 @@ class OracleSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. 
Type: string (or Expression @@ -51,11 +55,12 @@ class OracleSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, pre_copy_script=None, **kwargs) -> None: - super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'OracleSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py index 3f74cf83ee7a..12b3aa31353f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py @@ -27,6 +27,10 @@ class OracleSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param oracle_reader_query: Oracle reader query. Type: string (or @@ -46,6 +50,7 @@ class OracleSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py index 89252615e6e5..43afe27fda2f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py @@ -27,6 +27,10 @@ class OracleSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
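A sketch of the new max_concurrent_connections setting on the Oracle copy source and sink hunks above; the query, pre-copy script, and connection counts are illustrative values only.

from azure.mgmt.datafactory.models import OracleSink, OracleSource

# Cap the parallel connections opened against the source and sink stores.
oracle_source = OracleSource(
    oracle_reader_query='SELECT * FROM SALES',
    max_concurrent_connections=4,
)
oracle_sink = OracleSink(
    pre_copy_script='TRUNCATE TABLE STAGING_SALES',
    max_concurrent_connections=2,
)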
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param oracle_reader_query: Oracle reader query. Type: string (or @@ -46,13 +50,14 @@ class OracleSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, oracle_reader_query=None, query_timeout=None, **kwargs) -> None: - super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, **kwargs) -> None: + super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.oracle_reader_query = oracle_reader_query self.query_timeout = query_timeout self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py index 4af8faaca8db..af51100cd88e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py @@ -43,15 +43,14 @@ class OracleTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the on-premises Oracle - database. Type: string (or Expression with resultType string). + :param table_name: The table name of the on-premises Oracle database. + Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py index aaa1291c8f76..563371653de8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py @@ -43,15 +43,14 @@ class OracleTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the on-premises Oracle - database. 
Type: string (or Expression with resultType string). + :param table_name: The table name of the on-premises Oracle database. + Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -67,7 +66,7 @@ class OracleTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'OracleTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py index 190fc45985d3..d7ae0bc075e7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py @@ -29,7 +29,7 @@ class PaypalLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py index 832b0dff257b..c11cda7a52f3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py @@ -29,7 +29,7 @@ class PaypalLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py index 5bb73029d10c..94cdbccae6ee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py @@ -27,6 +27,10 @@ class PaypalSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
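Because table_name is no longer marked required on OracleTableDataset (see the hunk above), the dataset can now be constructed without it, for example when the table is supplied through parameters at runtime; the reference name below is a placeholder.

from azure.mgmt.datafactory.models import LinkedServiceReference, OracleTableDataset

# table_name omitted on purpose; only the linked service reference is required.
oracle_dataset = OracleTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='OracleLS'),
)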
+ :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class PaypalSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py index 6a9dcce16a2d..05730d0ae067 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py @@ -27,6 +27,10 @@ class PaypalSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class PaypalSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'PaypalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py index b9d16bc32c56..308a8e4cf592 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py @@ -29,7 +29,7 @@ class PhoenixLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py index aeb89e4fdd4a..de8210c2cc89 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py @@ -29,7 +29,7 @@ class PhoenixLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py index daad6ec41c31..30171c6177ff 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py @@ -27,6 +27,10 @@ class PhoenixSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class PhoenixSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py index 619e7220dd09..1384f59e1aa4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py @@ -27,6 +27,10 @@ class PhoenixSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class PhoenixSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'PhoenixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py index 3ae4beb48ff1..a2407bd9835f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py @@ -23,6 +23,12 @@ class PipelineRun(Model): :type additional_properties: dict[str, object] :ivar run_id: Identifier of a run. :vartype run_id: str + :ivar run_group_id: Identifier that correlates all the recovery runs of a + pipeline run. + :vartype run_group_id: str + :ivar is_latest: Indicates if the recovered pipeline run is the latest in + its group. + :vartype is_latest: bool :ivar pipeline_name: The pipeline name. 
:vartype pipeline_name: str :ivar parameters: The full or partial list of parameter name, value pair @@ -47,6 +53,8 @@ class PipelineRun(Model): _validation = { 'run_id': {'readonly': True}, + 'run_group_id': {'readonly': True}, + 'is_latest': {'readonly': True}, 'pipeline_name': {'readonly': True}, 'parameters': {'readonly': True}, 'invoked_by': {'readonly': True}, @@ -61,6 +69,8 @@ class PipelineRun(Model): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'run_id': {'key': 'runId', 'type': 'str'}, + 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, + 'is_latest': {'key': 'isLatest', 'type': 'bool'}, 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{str}'}, 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, @@ -76,6 +86,8 @@ def __init__(self, **kwargs): super(PipelineRun, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.run_id = None + self.run_group_id = None + self.is_latest = None self.pipeline_name = None self.parameters = None self.invoked_by = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py index aed5dd0466d2..33e0f23f24ac 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py @@ -23,6 +23,12 @@ class PipelineRun(Model): :type additional_properties: dict[str, object] :ivar run_id: Identifier of a run. :vartype run_id: str + :ivar run_group_id: Identifier that correlates all the recovery runs of a + pipeline run. + :vartype run_group_id: str + :ivar is_latest: Indicates if the recovered pipeline run is the latest in + its group. + :vartype is_latest: bool :ivar pipeline_name: The pipeline name. 
:vartype pipeline_name: str :ivar parameters: The full or partial list of parameter name, value pair @@ -47,6 +53,8 @@ class PipelineRun(Model): _validation = { 'run_id': {'readonly': True}, + 'run_group_id': {'readonly': True}, + 'is_latest': {'readonly': True}, 'pipeline_name': {'readonly': True}, 'parameters': {'readonly': True}, 'invoked_by': {'readonly': True}, @@ -61,6 +69,8 @@ class PipelineRun(Model): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'run_id': {'key': 'runId', 'type': 'str'}, + 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, + 'is_latest': {'key': 'isLatest', 'type': 'bool'}, 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{str}'}, 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, @@ -76,6 +86,8 @@ def __init__(self, *, additional_properties=None, **kwargs) -> None: super(PipelineRun, self).__init__(**kwargs) self.additional_properties = additional_properties self.run_id = None + self.run_group_id = None + self.is_latest = None self.pipeline_name = None self.parameters = None self.invoked_by = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py index af16c6c89cd2..f8ce5bd0803e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py @@ -29,7 +29,7 @@ class PostgreSqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py index 5e7e674a2447..0221aa620064 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py @@ -29,7 +29,7 @@ class PostgreSqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py index abf4adde8515..21f18f07b262 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py @@ -29,7 +29,7 @@ class PrestoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
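A sketch of reading the new read-only run_group_id and is_latest properties added to PipelineRun above; the credential, subscription, factory, and run identifiers are placeholders, and the client construction assumes the usual service-principal flow.

from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.datafactory import DataFactoryManagementClient

# Placeholder credentials and identifiers; replace with real values.
credentials = ServicePrincipalCredentials(
    client_id='<client-id>', secret='<client-secret>', tenant='<tenant-id>')
adf_client = DataFactoryManagementClient(credentials, '<subscription-id>')

# Both properties are populated by the service and are read-only on the model.
run = adf_client.pipeline_runs.get('<resource-group>', '<factory-name>', '<run-id>')
if run.is_latest:
    print('Latest attempt in recovery run group:', run.run_group_id)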
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py index fe178f62df4f..75ab99d5a58f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py @@ -29,7 +29,7 @@ class PrestoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py index 333a4e6dca9e..9b7274011265 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py @@ -27,6 +27,10 @@ class PrestoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class PrestoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py index ad16115ef8f3..47fe3eb5f790 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py @@ -27,6 +27,10 @@ class PrestoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class PrestoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'PrestoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py index c2ca123e5409..6353c1cda96a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py @@ -29,7 +29,7 @@ class QuickBooksLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py index 7ba9f145c26e..be12fc5cfba5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py @@ -29,7 +29,7 @@ class QuickBooksLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py index b8567cd772ed..cce0a026ae5a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py @@ -27,6 +27,10 @@ class QuickBooksSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class QuickBooksSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py index b6bb7a260d1d..a00f35d4e1c1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py @@ -27,6 +27,10 @@ class QuickBooksSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class QuickBooksSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'QuickBooksSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py index 1dc8ff198eb8..2450f31222df 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py @@ -27,6 +27,10 @@ class RelationalSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType @@ -42,6 +46,7 @@ class RelationalSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py index 9e7a75043b8c..f88383cbd729 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py @@ -27,6 +27,10 @@ class RelationalSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType @@ -42,11 +46,12 @@ class RelationalSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'RelationalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py index e66cf2feebbc..8c5ca2d67f3c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py @@ -31,6 +31,9 @@ class RerunTumblingWindowTrigger(Trigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. 
+ :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param parent_trigger: The parent trigger reference. @@ -58,6 +61,7 @@ class RerunTumblingWindowTrigger(Trigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py index eafc3b5743a0..4a7a20759c1b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py @@ -31,6 +31,9 @@ class RerunTumblingWindowTrigger(Trigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param parent_trigger: The parent trigger reference. @@ -58,6 +61,7 @@ class RerunTumblingWindowTrigger(Trigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, @@ -65,8 +69,8 @@ class RerunTumblingWindowTrigger(Trigger): 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, } - def __init__(self, *, requested_start_time, requested_end_time, max_concurrency: int, additional_properties=None, description: str=None, parent_trigger=None, **kwargs) -> None: - super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + def __init__(self, *, requested_start_time, requested_end_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, parent_trigger=None, **kwargs) -> None: + super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.parent_trigger = parent_trigger self.requested_start_time = requested_start_time self.requested_end_time = requested_end_time diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py index 9c1b8e4c3cbd..16d1af502787 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py @@ -29,7 +29,7 @@ class ResponsysLinkedService(LinkedService): :type parameters: dict[str, 
~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py index 4c1997e6ab26..6d8a74a0a34b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py @@ -29,7 +29,7 @@ class ResponsysLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py index 1e1a9397a6ba..fd25b8e71377 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py @@ -27,6 +27,10 @@ class ResponsysSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class ResponsysSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py index 3bfb9c19a2a7..8d5e4ac091f7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py @@ -27,6 +27,10 @@ class ResponsysSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class ResponsysSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'ResponsysSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py new file mode 100644 index 000000000000..9a5d41858e54 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class RestResourceDataset(Dataset): + """A Rest service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param relative_url: The relative URL to the resource that the RESTful API + provides. 
Type: string (or Expression with resultType string). + :type relative_url: object + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). + :type pagination_rules: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RestResourceDataset, self).__init__(**kwargs) + self.relative_url = kwargs.get('relative_url', None) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.pagination_rules = kwargs.get('pagination_rules', None) + self.type = 'RestResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py new file mode 100644 index 000000000000..99f39c97f373 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class RestResourceDataset(Dataset): + """A Rest service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param relative_url: The relative URL to the resource that the RESTful API + provides. Type: string (or Expression with resultType string). + :type relative_url: object + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). 
+ :type pagination_rules: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, **kwargs) -> None: + super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.relative_url = relative_url + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.pagination_rules = pagination_rules + self.type = 'RestResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py new file mode 100644 index 000000000000..0fbb15654438 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class RestServiceLinkedService(LinkedService): + """Rest Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The base URL of the REST service. + :type url: object + :param enable_server_certificate_validation: Whether to validate server + side SSL certificate when connecting to the endpoint. The default value is + true. Type: boolean (or Expression with resultType boolean). + :type enable_server_certificate_validation: object + :param authentication_type: Required. Type of authentication used to + connect to the REST service. Possible values include: 'Anonymous', + 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.RestServiceAuthenticationType + :param user_name: The user name used in Basic authentication type. + :type user_name: object + :param password: The password used in Basic authentication type. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_id: The application's client ID used in + AadServicePrincipal authentication type. + :type service_principal_id: object + :param service_principal_key: The application's key used in + AadServicePrincipal authentication type. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The tenant information (domain name or tenant ID) used in + AadServicePrincipal authentication type under which your application + resides. + :type tenant: object + :param aad_resource_id: The resource you are requesting authorization to + use. + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RestServiceLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'RestService' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py new file mode 100644 index 000000000000..9af9f609e52b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class RestServiceLinkedService(LinkedService): + """Rest Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The base URL of the REST service. + :type url: object + :param enable_server_certificate_validation: Whether to validate server + side SSL certificate when connecting to the endpoint. The default value is + true. Type: boolean (or Expression with resultType boolean). + :type enable_server_certificate_validation: object + :param authentication_type: Required. Type of authentication used to + connect to the REST service. Possible values include: 'Anonymous', + 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.RestServiceAuthenticationType + :param user_name: The user name used in Basic authentication type. + :type user_name: object + :param password: The password used in Basic authentication type. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_id: The application's client ID used in + AadServicePrincipal authentication type. + :type service_principal_id: object + :param service_principal_key: The application's key used in + AadServicePrincipal authentication type. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The tenant information (domain name or tenant ID) used in + AadServicePrincipal authentication type under which your application + resides. + :type tenant: object + :param aad_resource_id: The resource you are requesting authorization to + use. + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, enable_server_certificate_validation=None, user_name=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, aad_resource_id=None, encrypted_credential=None, **kwargs) -> None: + super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.enable_server_certificate_validation = enable_server_certificate_validation + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id + self.encrypted_credential = encrypted_credential + self.type = 'RestService' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py new file mode 100644 index 000000000000..a8c7efca21e3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class RestSource(CopySource): + """A copy activity Rest service source. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:01:40. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + :param request_interval: The time to await before sending next page + request. + :type request_interval: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RestSource, self).__init__(**kwargs) + self.http_request_timeout = kwargs.get('http_request_timeout', None) + self.request_interval = kwargs.get('request_interval', None) + self.type = 'RestSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py new file mode 100644 index 000000000000..cf0878e050e0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class RestSource(CopySource): + """A copy activity Rest service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:01:40. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + :param request_interval: The time to await before sending next page + request. + :type request_interval: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, http_request_timeout=None, request_interval=None, **kwargs) -> None: + super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.http_request_timeout = http_request_timeout + self.request_interval = request_interval + self.type = 'RestSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py index 63a4cddc063d..7d54150a6815 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py @@ -24,7 +24,7 @@ class RunQueryFilter(Model): TriggerName, TriggerRunTimestamp and Status. Possible values include: 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', - 'TriggerRunTimestamp' + 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand :param operator: Required. Operator to be used for filter. Possible values include: 'Equals', 'NotEquals', 'In', 'NotIn' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py index fc95591801bd..814e7a4b499b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py @@ -24,7 +24,7 @@ class RunQueryFilter(Model): TriggerName, TriggerRunTimestamp and Status. 
Possible values include: 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', - 'TriggerRunTimestamp' + 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand :param operator: Required. Operator to be used for filter. Possible values include: 'Equals', 'NotEquals', 'In', 'NotIn' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py index 5804e779d1ef..c644ac664831 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py @@ -29,7 +29,7 @@ class SalesforceLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py index 9fa5287aa3b4..05fcea7a3990 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py @@ -29,7 +29,7 @@ class SalesforceLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py index f3d2861576e4..93b4fcdb3d1f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py @@ -29,7 +29,7 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py index 863b679398e1..d7e09e27a43f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py @@ -29,7 +29,7 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py index bf08fdaa88bf..09a0eca1758e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py @@ -27,6 +27,10 @@ class SalesforceMarketingCloudSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class SalesforceMarketingCloudSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py index 0a3d26cfb43b..9b898af0c3a1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py @@ -27,6 +27,10 @@ class SalesforceMarketingCloudSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class SalesforceMarketingCloudSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'SalesforceMarketingCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py index 525aaccd49be..9a1291bd4bfe 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py @@ -34,6 +34,10 @@ class SalesforceSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is @@ -65,6 +69,7 @@ class SalesforceSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py index 6db44ebb4228..54a56618d01e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py @@ -34,6 +34,10 @@ class SalesforceSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str :param write_behavior: The write behavior for the operation. Default is @@ -65,14 +69,15 @@ class SalesforceSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: - super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: + super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name self.ignore_null_values = ignore_null_values diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py index 8442a716c842..4f2590c3ab9d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py @@ -27,6 +27,10 @@ class SalesforceSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. 
Type: string (or Expression with resultType @@ -46,6 +50,7 @@ class SalesforceSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py index 9ebc65ddeec8..4441e92eaff3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py @@ -27,6 +27,10 @@ class SalesforceSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType @@ -46,13 +50,14 @@ class SalesforceSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, read_behavior=None, **kwargs) -> None: - super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: + super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.read_behavior = read_behavior self.type = 'SalesforceSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py index 2fbb906559bc..a57164c7215d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py @@ -29,7 +29,7 @@ class SapBWLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. 
Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py index a1f6133e558d..92aef25dc215 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py @@ -29,7 +29,7 @@ class SapBWLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py index 5c9a6c2deb00..53d47ab8ae41 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py @@ -29,7 +29,7 @@ class SapCloudForCustomerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py index 85c1100d01eb..9e47fd696503 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py @@ -29,7 +29,7 @@ class SapCloudForCustomerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py index 05d98ec70eaa..e5a37858abb5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py @@ -34,6 +34,10 @@ class SapCloudForCustomerSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. 
Default is @@ -52,6 +56,7 @@ class SapCloudForCustomerSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py index f3cd45263f3e..29f01fdd6891 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py @@ -34,6 +34,10 @@ class SapCloudForCustomerSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is @@ -52,11 +56,12 @@ class SapCloudForCustomerSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, write_behavior=None, **kwargs) -> None: - super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.type = 'SapCloudForCustomerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py index c8dedf91e188..561c1b342f93 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py @@ -27,6 +27,10 @@ class SapCloudForCustomerSource(CopySource): with resultType string), pattern: 
((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: SAP Cloud for Customer OData query. For example, "$top=1". @@ -42,6 +46,7 @@ class SapCloudForCustomerSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py index ab5bddf21be3..e9dab6ad1899 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py @@ -27,6 +27,10 @@ class SapCloudForCustomerSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: SAP Cloud for Customer OData query. For example, "$top=1". 
@@ -42,11 +46,12 @@ class SapCloudForCustomerSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'SapCloudForCustomerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py index 4303b2f9cbca..0ca69242055f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py @@ -29,7 +29,7 @@ class SapEccLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py index 24490fb39a9a..7afd76b8fe09 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py @@ -29,7 +29,7 @@ class SapEccLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py index 84aa047e6d8a..6a2d17862d6b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py @@ -27,6 +27,10 @@ class SapEccSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: SAP ECC OData query. For example, "$top=1". Type: string (or @@ -42,6 +46,7 @@ class SapEccSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'str'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py index f8993720428c..95a11500bd24 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py @@ -27,6 +27,10 @@ class SapEccSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: SAP ECC OData query. For example, "$top=1". Type: string (or @@ -42,11 +46,12 @@ class SapEccSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query: str=None, **kwargs) -> None: - super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query: str=None, **kwargs) -> None: + super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'SapEccSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py index 0c2dbec28558..391bd79f8c28 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py @@ -29,7 +29,7 @@ class SapHanaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py index c906d74d0c2b..bbf307d1bede 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py @@ -29,7 +29,7 @@ class SapHanaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py new file mode 100644 index 000000000000..bfe9c323d302 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SapOpenHubLinkedService(LinkedService): + """SAP Business Warehouse Open Hub Destination Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Host name of the SAP BW instance where the open + hub destination is located. Type: string (or Expression with resultType + string). + :type server: object + :param system_number: Required. System number of the BW system where the + open hub destination is located. (Usually a two-digit decimal number + represented as a string.) Type: string (or Expression with resultType + string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system where + the open hub destination is located. (Usually a three-digit decimal number + represented as a string) Type: string (or Expression with resultType + string). + :type client_id: object + :param language: Language of the BW system where the open hub destination + is located. The default value is EN. Type: string (or Expression with + resultType string). 
+ :type language: object + :param user_name: Username to access the SAP BW server where the open hub + destination is located. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password to access the SAP BW server where the open hub + destination is located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapOpenHubLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.system_number = kwargs.get('system_number', None) + self.client_id = kwargs.get('client_id', None) + self.language = kwargs.get('language', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapOpenHub' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py new file mode 100644 index 000000000000..eddc50b0f1c5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapOpenHubLinkedService(LinkedService): + """SAP Business Warehouse Open Hub Destination Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Host name of the SAP BW instance where the open + hub destination is located. Type: string (or Expression with resultType + string). + :type server: object + :param system_number: Required. System number of the BW system where the + open hub destination is located. (Usually a two-digit decimal number + represented as a string.) Type: string (or Expression with resultType + string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system where + the open hub destination is located. (Usually a three-digit decimal number + represented as a string) Type: string (or Expression with resultType + string). + :type client_id: object + :param language: Language of the BW system where the open hub destination + is located. The default value is EN. Type: string (or Expression with + resultType string). + :type language: object + :param user_name: Username to access the SAP BW server where the open hub + destination is located. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password to access the SAP BW server where the open hub + destination is located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, system_number, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, language=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.system_number = system_number + self.client_id = client_id + self.language = language + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapOpenHub' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py new file mode 100644 index 000000000000..ea98207a18cf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SapOpenHubSource(CopySource): + """A copy activity source for SAP Business Warehouse Open Hub Destination + source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SapOpenHubSource, self).__init__(**kwargs) + self.type = 'SapOpenHubSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py new file mode 100644 index 000000000000..9cfa4e5243b6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapOpenHubSource(CopySource): + """A copy activity source for SAP Business Warehouse Open Hub Destination + source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SapOpenHubSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py new file mode 100644 index 000000000000..2682969c5016 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SapOpenHubTableDataset(Dataset): + """Sap Business Warehouse Open Hub Destination Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param open_hub_destination_name: Required. The name of the Open Hub + Destination with destination type as Database Table. Type: string (or + Expression with resultType string). + :type open_hub_destination_name: object + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. 
Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer ). + :type base_request_id: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'open_hub_destination_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, + 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapOpenHubTableDataset, self).__init__(**kwargs) + self.open_hub_destination_name = kwargs.get('open_hub_destination_name', None) + self.exclude_last_request = kwargs.get('exclude_last_request', None) + self.base_request_id = kwargs.get('base_request_id', None) + self.type = 'SapOpenHubTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py new file mode 100644 index 000000000000..b06a53c10db3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapOpenHubTableDataset(Dataset): + """Sap Business Warehouse Open Hub Destination Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param open_hub_destination_name: Required. The name of the Open Hub + Destination with destination type as Database Table. Type: string (or + Expression with resultType string). + :type open_hub_destination_name: object + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer ). + :type base_request_id: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'open_hub_destination_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, + 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, open_hub_destination_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: + super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.open_hub_destination_name = open_hub_destination_name + self.exclude_last_request = exclude_last_request + self.base_request_id = base_request_id + self.type = 'SapOpenHubTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py index eaebfb4c2553..b9ea331b8c6e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py @@ -30,6 +30,9 @@ class ScheduleTrigger(MultiplePipelineTrigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or 
~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -49,6 +52,7 @@ class ScheduleTrigger(MultiplePipelineTrigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py index 1fc148a81b29..f13f01c7fa13 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py @@ -30,6 +30,9 @@ class ScheduleTrigger(MultiplePipelineTrigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -49,12 +52,13 @@ class ScheduleTrigger(MultiplePipelineTrigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, } - def __init__(self, *, recurrence, additional_properties=None, description: str=None, pipelines=None, **kwargs) -> None: - super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, pipelines=pipelines, **kwargs) + def __init__(self, *, recurrence, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) self.recurrence = recurrence self.type = 'ScheduleTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py index c433366826b8..4d42f575e769 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py @@ -29,7 +29,7 @@ class ServiceNowLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. 
Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py index cdd9e8ebb718..b9d166f241d6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py @@ -29,7 +29,7 @@ class ServiceNowLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py index 00068f5e5d32..16b10bb8de5e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py @@ -27,6 +27,10 @@ class ServiceNowSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class ServiceNowSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py index ffe72cb426e7..20d1a64d04d3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py @@ -27,6 +27,10 @@ class ServiceNowSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class ServiceNowSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'ServiceNowSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py index 31a9d5524f36..aa4c535fc514 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py @@ -29,7 +29,7 @@ class SftpServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py index 581e8f2a0f8e..7decd7781348 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py @@ -29,7 +29,7 @@ class SftpServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py index b57922620ef8..ee5311dceb7a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py @@ -29,7 +29,7 @@ class ShopifyLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. 
:type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py index 714de7f0ddf6..ea6189277552 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py @@ -29,7 +29,7 @@ class ShopifyLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py index 3006ede4633d..d4596976d459 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py @@ -27,6 +27,10 @@ class ShopifySource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class ShopifySource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py index ec17bdce3e35..6b56edd62904 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py @@ -27,6 +27,10 @@ class ShopifySource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class ShopifySource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'ShopifySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py index 006311c492bb..4f9ab49a7bba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py @@ -29,7 +29,7 @@ class SparkLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py index c5e20deef8e8..f6433b6ab187 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py @@ -29,7 +29,7 @@ class SparkLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py index 643a71610930..6d670c1c6b2a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py @@ -27,6 +27,10 @@ class SparkSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. 
Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class SparkSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py index ede7f9ed5e2b..8da01b0cd823 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py @@ -27,6 +27,10 @@ class SparkSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class SparkSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'SparkSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py index ac12b6e55e59..6b4785b91ab4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py @@ -34,6 +34,10 @@ class SqlDWSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. 
Type: string (or Expression @@ -58,6 +62,7 @@ class SqlDWSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py index 2b2d44cf16c6..efe63dcf788a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py @@ -34,6 +34,10 @@ class SqlDWSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. Type: string (or Expression @@ -58,14 +62,15 @@ class SqlDWSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, **kwargs) -> None: - super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, **kwargs) -> None: + super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.allow_poly_base = allow_poly_base self.poly_base_settings = poly_base_settings diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py index aa3f88a75938..1a020672f7c2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py @@ -27,6 +27,10 @@ class SqlDWSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or @@ -51,6 +55,7 @@ class SqlDWSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py index b74c004141d1..ae8fe605024f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py @@ -27,6 +27,10 @@ class SqlDWSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param sql_reader_query: SQL Data Warehouse reader query. 
Type: string (or @@ -51,14 +55,15 @@ class SqlDWSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py index 36230c046278..45d342212ea4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py @@ -29,7 +29,7 @@ class SqlServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py index fb446a12f601..3eb8c5063dc1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py @@ -29,7 +29,7 @@ class SqlServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
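# --------------------------------------------------------------------------
# Editor's sketch (not part of the generated patch): the docstring fixes in
# the linked-service files above reword `annotations` from "Dataset" to
# "linked service"; the property is a free-form list of tags, and
# ScheduleTrigger gains the same optional parameter in this change. A minimal
# sketch, assuming the models exported by azure.mgmt.datafactory.models; the
# connection string, names and tag values are hypothetical:
from azure.mgmt.datafactory.models import (
    SqlServerLinkedService, ScheduleTrigger, ScheduleTriggerRecurrence)

ls = SqlServerLinkedService(
    connection_string='Server=tcp:myserver;Database=mydb;',  # hypothetical
    annotations=['owner:data-team', 'env:dev'])  # tags describe the linked service itself

trigger = ScheduleTrigger(
    recurrence=ScheduleTriggerRecurrence(frequency='Hour', interval=1),
    annotations=['nightly'])  # new optional list[object] on triggers
# --------------------------------------------------------------------------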
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py index 77692817100d..9c2ebd2b389f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py @@ -34,6 +34,10 @@ class SqlSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. @@ -60,6 +64,7 @@ class SqlSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py index 5aa68f696f16..115cc3a899e9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py @@ -34,6 +34,10 @@ class SqlSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. 
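# --------------------------------------------------------------------------
# Editor's sketch (not part of the generated patch): maxConcurrentConnections
# is added uniformly to copy sources and sinks in this change. Because the
# parameter is typed as `object`, it accepts either a literal integer or a
# Data Factory expression. A minimal copy-activity sketch; the activity name,
# reader query and pre-copy script are hypothetical:
from azure.mgmt.datafactory.models import CopyActivity, SqlSource, SqlSink

copy_orders = CopyActivity(
    name='CopyOrders',
    source=SqlSource(
        sql_reader_query='SELECT * FROM staging.Orders',
        max_concurrent_connections=4),  # cap connections opened against the source store
    sink=SqlSink(
        pre_copy_script='TRUNCATE TABLE dbo.Orders',
        max_concurrent_connections=2))  # cap connections opened against the sink store
# --------------------------------------------------------------------------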
@@ -60,6 +64,7 @@ class SqlSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, @@ -67,8 +72,8 @@ class SqlSink(CopySink): 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py index 3f374b19f072..bb31474b1f7c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py @@ -27,6 +27,10 @@ class SqlSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param sql_reader_query: SQL reader query. 
Type: string (or Expression @@ -50,6 +54,7 @@ class SqlSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py index ff39b6768a9f..dcad458fd4a6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py @@ -27,6 +27,10 @@ class SqlSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param sql_reader_query: SQL reader query. Type: string (or Expression @@ -50,14 +54,15 @@ class SqlSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py index 4e9df2b68e62..4edfc8b211f7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py @@ -29,7 +29,7 @@ class SquareLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py index 0b9218efba97..40719f600a18 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py @@ -29,7 +29,7 @@ class SquareLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py index 919abc0b19fa..f083df43f13a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py @@ -27,6 +27,10 @@ class SquareSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class SquareSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py index f7ba625398af..ec8a741d564c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py @@ -27,6 +27,10 @@ class SquareSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class SquareSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'SquareSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py new file mode 100644 index 000000000000..5dff9764e2a2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata import SsisObjectMetadata + + +class SsisEnvironment(SsisObjectMetadata): + """Ssis environment. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains environment. 
+ :type folder_id: long + :param variables: Variable in environment + :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, + } + + def __init__(self, **kwargs): + super(SsisEnvironment, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.variables = kwargs.get('variables', None) + self.type = 'Environment' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py new file mode 100644 index 000000000000..43697ba62146 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisEnvironment(SsisObjectMetadata): + """Ssis environment. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains environment. + :type folder_id: long + :param variables: Variable in environment + :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, variables=None, **kwargs) -> None: + super(SsisEnvironment, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.variables = variables + self.type = 'Environment' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py new file mode 100644 index 000000000000..e7d31d369392 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisEnvironmentReference(Model): + """Ssis environment reference. + + :param id: Environment reference id. + :type id: long + :param environment_folder_name: Environment folder name. + :type environment_folder_name: str + :param environment_name: Environment name. + :type environment_name: str + :param reference_type: Reference type + :type reference_type: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'}, + 'environment_name': {'key': 'environmentName', 'type': 'str'}, + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisEnvironmentReference, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.environment_folder_name = kwargs.get('environment_folder_name', None) + self.environment_name = kwargs.get('environment_name', None) + self.reference_type = kwargs.get('reference_type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py new file mode 100644 index 000000000000..14cbfca99d4f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisEnvironmentReference(Model): + """Ssis environment reference. + + :param id: Environment reference id. + :type id: long + :param environment_folder_name: Environment folder name. + :type environment_folder_name: str + :param environment_name: Environment name. 
+ :type environment_name: str + :param reference_type: Reference type + :type reference_type: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'}, + 'environment_name': {'key': 'environmentName', 'type': 'str'}, + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, environment_folder_name: str=None, environment_name: str=None, reference_type: str=None, **kwargs) -> None: + super(SsisEnvironmentReference, self).__init__(**kwargs) + self.id = id + self.environment_folder_name = environment_folder_name + self.environment_name = environment_name + self.reference_type = reference_type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py new file mode 100644 index 000000000000..350b0d92852b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata import SsisObjectMetadata + + +class SsisFolder(SsisObjectMetadata): + """Ssis folder. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisFolder, self).__init__(**kwargs) + self.type = 'Folder' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py new file mode 100644 index 000000000000..d6483fda2c08 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisFolder(SsisObjectMetadata): + """Ssis folder. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. 
+ :type description: str + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: + super(SsisFolder, self).__init__(id=id, name=name, description=description, **kwargs) + self.type = 'Folder' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py index ed7940124645..811075137f41 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py @@ -15,6 +15,9 @@ class SsisObjectMetadata(Model): """SSIS object metadata. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder + All required parameters must be populated in order to send to Azure. :param id: Metadata id. @@ -38,6 +41,10 @@ class SsisObjectMetadata(Model): 'type': {'key': 'type', 'type': 'str'}, } + _subtype_map = { + 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} + } + def __init__(self, **kwargs): super(SsisObjectMetadata, self).__init__(**kwargs) self.id = kwargs.get('id', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py index b7373e36523c..45f7e15af4fa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py @@ -15,6 +15,9 @@ class SsisObjectMetadata(Model): """SSIS object metadata. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder + All required parameters must be populated in order to send to Azure. :param id: Metadata id. @@ -38,6 +41,10 @@ class SsisObjectMetadata(Model): 'type': {'key': 'type', 'type': 'str'}, } + _subtype_map = { + 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} + } + def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: super(SsisObjectMetadata, self).__init__(**kwargs) self.id = id diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py new file mode 100644 index 000000000000..b04fc1138797 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata import SsisObjectMetadata + + +class SsisPackage(SsisObjectMetadata): + """Ssis Package. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains package. + :type folder_id: long + :param project_version: Project version which contains package. + :type project_version: long + :param project_id: Project id which contains package. + :type project_id: long + :param parameters: Parameters in package + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'project_version': {'key': 'projectVersion', 'type': 'long'}, + 'project_id': {'key': 'projectId', 'type': 'long'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, **kwargs): + super(SsisPackage, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.project_version = kwargs.get('project_version', None) + self.project_id = kwargs.get('project_id', None) + self.parameters = kwargs.get('parameters', None) + self.type = 'Package' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py new file mode 100644 index 000000000000..e1e932e97ae6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisPackage(SsisObjectMetadata): + """Ssis Package. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains package. + :type folder_id: long + :param project_version: Project version which contains package. + :type project_version: long + :param project_id: Project id which contains package. 
+ :type project_id: long + :param parameters: Parameters in package + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'project_version': {'key': 'projectVersion', 'type': 'long'}, + 'project_id': {'key': 'projectId', 'type': 'long'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, project_version: int=None, project_id: int=None, parameters=None, **kwargs) -> None: + super(SsisPackage, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.project_version = project_version + self.project_id = project_id + self.parameters = parameters + self.type = 'Package' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py new file mode 100644 index 000000000000..c456af0bab48 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisParameter(Model): + """Ssis parameter. + + :param id: Parameter id. + :type id: long + :param name: Parameter name. + :type name: str + :param description: Parameter description. + :type description: str + :param data_type: Parameter type. + :type data_type: str + :param required: Whether parameter is required. + :type required: bool + :param sensitive: Whether parameter is sensitive. + :type sensitive: bool + :param design_default_value: Design default value of parameter. + :type design_default_value: str + :param default_value: Default value of parameter. + :type default_value: str + :param sensitive_default_value: Default sensitive value of parameter. + :type sensitive_default_value: str + :param value_type: Parameter value type. + :type value_type: str + :param value_set: Parameter value set. + :type value_set: bool + :param variable: Parameter reference variable. 
+ :type variable: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'required': {'key': 'required', 'type': 'bool'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'str'}, + 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, + 'value_type': {'key': 'valueType', 'type': 'str'}, + 'value_set': {'key': 'valueSet', 'type': 'bool'}, + 'variable': {'key': 'variable', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisParameter, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.data_type = kwargs.get('data_type', None) + self.required = kwargs.get('required', None) + self.sensitive = kwargs.get('sensitive', None) + self.design_default_value = kwargs.get('design_default_value', None) + self.default_value = kwargs.get('default_value', None) + self.sensitive_default_value = kwargs.get('sensitive_default_value', None) + self.value_type = kwargs.get('value_type', None) + self.value_set = kwargs.get('value_set', None) + self.variable = kwargs.get('variable', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py new file mode 100644 index 000000000000..6a4ff73768f0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisParameter(Model): + """Ssis parameter. + + :param id: Parameter id. + :type id: long + :param name: Parameter name. + :type name: str + :param description: Parameter description. + :type description: str + :param data_type: Parameter type. + :type data_type: str + :param required: Whether parameter is required. + :type required: bool + :param sensitive: Whether parameter is sensitive. + :type sensitive: bool + :param design_default_value: Design default value of parameter. + :type design_default_value: str + :param default_value: Default value of parameter. + :type default_value: str + :param sensitive_default_value: Default sensitive value of parameter. + :type sensitive_default_value: str + :param value_type: Parameter value type. + :type value_type: str + :param value_set: Parameter value set. + :type value_set: bool + :param variable: Parameter reference variable. 
+ :type variable: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'required': {'key': 'required', 'type': 'bool'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'str'}, + 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, + 'value_type': {'key': 'valueType', 'type': 'str'}, + 'value_set': {'key': 'valueSet', 'type': 'bool'}, + 'variable': {'key': 'variable', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, required: bool=None, sensitive: bool=None, design_default_value: str=None, default_value: str=None, sensitive_default_value: str=None, value_type: str=None, value_set: bool=None, variable: str=None, **kwargs) -> None: + super(SsisParameter, self).__init__(**kwargs) + self.id = id + self.name = name + self.description = description + self.data_type = data_type + self.required = required + self.sensitive = sensitive + self.design_default_value = design_default_value + self.default_value = default_value + self.sensitive_default_value = sensitive_default_value + self.value_type = value_type + self.value_set = value_set + self.variable = variable diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py new file mode 100644 index 000000000000..c29a36fb628e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata import SsisObjectMetadata + + +class SsisProject(SsisObjectMetadata): + """Ssis project. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains project. + :type folder_id: long + :param version: Project version. 
+ :type version: long + :param environment_refs: Environment reference in project + :type environment_refs: + list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :param parameters: Parameters in project + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'version': {'key': 'version', 'type': 'long'}, + 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, **kwargs): + super(SsisProject, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.version = kwargs.get('version', None) + self.environment_refs = kwargs.get('environment_refs', None) + self.parameters = kwargs.get('parameters', None) + self.type = 'Project' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py new file mode 100644 index 000000000000..11b95a644e2f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisProject(SsisObjectMetadata): + """Ssis project. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains project. + :type folder_id: long + :param version: Project version. 
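For illustration, a minimal sketch of putting the new SSIS metadata models together. It assumes SsisProject and SsisParameter are exported from azure.mgmt.datafactory.models, as is standard for this SDK; every name and value below is a placeholder, and in practice these objects are returned by the SSIS object-metadata APIs rather than built by hand.

from azure.mgmt.datafactory.models import SsisParameter, SsisProject

# Placeholder values only: SsisProject and SsisParameter are plain msrest
# models, so they accept their documented fields as keyword arguments.
server_param = SsisParameter(
    id=1,
    name='ServerName',
    data_type='String',
    required=True,
    sensitive=False,
    default_value='localhost',
)

project = SsisProject(
    id=10,
    name='EtlProject',
    folder_id=2,
    version=3,
    parameters=[server_param],
)
print(project.parameters[0].name)  # -> 'ServerName'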
+ :type version: long + :param environment_refs: Environment reference in project + :type environment_refs: + list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :param parameters: Parameters in project + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'version': {'key': 'version', 'type': 'long'}, + 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, version: int=None, environment_refs=None, parameters=None, **kwargs) -> None: + super(SsisProject, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.version = version + self.environment_refs = environment_refs + self.parameters = parameters + self.type = 'Project' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py new file mode 100644 index 000000000000..73fda3b27967 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisVariable(Model): + """Ssis variable. + + :param id: Variable id. + :type id: long + :param name: Variable name. + :type name: str + :param description: Variable description. + :type description: str + :param data_type: Variable type. + :type data_type: str + :param sensitive: Whether variable is sensitive. + :type sensitive: bool + :param value: Variable value. + :type value: str + :param sensitive_value: Variable sensitive value. 
+ :type sensitive_value: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'value': {'key': 'value', 'type': 'str'}, + 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisVariable, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.data_type = kwargs.get('data_type', None) + self.sensitive = kwargs.get('sensitive', None) + self.value = kwargs.get('value', None) + self.sensitive_value = kwargs.get('sensitive_value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py new file mode 100644 index 000000000000..e709842ff465 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisVariable(Model): + """Ssis variable. + + :param id: Variable id. + :type id: long + :param name: Variable name. + :type name: str + :param description: Variable description. + :type description: str + :param data_type: Variable type. + :type data_type: str + :param sensitive: Whether variable is sensitive. + :type sensitive: bool + :param value: Variable value. + :type value: str + :param sensitive_value: Variable sensitive value. 
+ :type sensitive_value: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'value': {'key': 'value', 'type': 'str'}, + 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, sensitive: bool=None, value: str=None, sensitive_value: str=None, **kwargs) -> None: + super(SsisVariable, self).__init__(**kwargs) + self.id = id + self.name = name + self.description = description + self.data_type = data_type + self.sensitive = sensitive + self.value = value + self.sensitive_value = sensitive_value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py index 634b4268bdb5..83de0e6f61f2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py @@ -29,7 +29,7 @@ class SybaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py index 59b20a5f73cd..5b6cc0ce6ded 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py @@ -29,7 +29,7 @@ class SybaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py index fdd098ae9659..043c537ad860 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py @@ -31,6 +31,10 @@ class TabularTranslator(CopyTranslator): "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or Expression with resultType object). :type schema_mapping: object + :param collection_reference: The JSON Path of the Nested Array that is + going to do cross-apply. Type: object (or Expression with resultType + object). 
+ :type collection_reference: object """ _validation = { @@ -42,10 +46,12 @@ class TabularTranslator(CopyTranslator): 'type': {'key': 'type', 'type': 'str'}, 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, + 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, } def __init__(self, **kwargs): super(TabularTranslator, self).__init__(**kwargs) self.column_mappings = kwargs.get('column_mappings', None) self.schema_mapping = kwargs.get('schema_mapping', None) + self.collection_reference = kwargs.get('collection_reference', None) self.type = 'TabularTranslator' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py index 0bd2ce51a0f0..cb1c11e5bb53 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py @@ -31,6 +31,10 @@ class TabularTranslator(CopyTranslator): "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or Expression with resultType object). :type schema_mapping: object + :param collection_reference: The JSON Path of the Nested Array that is + going to do cross-apply. Type: object (or Expression with resultType + object). + :type collection_reference: object """ _validation = { @@ -42,10 +46,12 @@ class TabularTranslator(CopyTranslator): 'type': {'key': 'type', 'type': 'str'}, 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, + 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, column_mappings=None, schema_mapping=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, column_mappings=None, schema_mapping=None, collection_reference=None, **kwargs) -> None: super(TabularTranslator, self).__init__(additional_properties=additional_properties, **kwargs) self.column_mappings = column_mappings self.schema_mapping = schema_mapping + self.collection_reference = collection_reference self.type = 'TabularTranslator' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py index b3847d7dd9f4..78b89638b359 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py @@ -29,7 +29,7 @@ class TeradataLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
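A brief sketch of the new collection_reference property in use, with invented JSON paths and column names: collection_reference points at the nested array to cross-apply, and the relative paths inside schema_mapping are then resolved against each element of that array.

from azure.mgmt.datafactory.models import TabularTranslator

# Placeholder mapping: flatten the nested $.orders array, emitting one row
# per array element.
translator = TabularTranslator(
    schema_mapping={
        'OrderId': '$.number',   # absolute path, taken from the root object
        'Sku': 'prod',           # resolved within each $.orders element
        'Quantity': 'qty',
    },
    collection_reference='$.orders',
)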
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py index 236741422023..e80b776454c0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py @@ -29,7 +29,7 @@ class TeradataLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py index 398402178ae4..728ffc32bcb5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py @@ -35,6 +35,9 @@ class Trigger(Model): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str """ @@ -48,6 +51,7 @@ class Trigger(Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, } @@ -60,4 +64,5 @@ def __init__(self, **kwargs): self.additional_properties = kwargs.get('additional_properties', None) self.description = kwargs.get('description', None) self.runtime_state = None + self.annotations = kwargs.get('annotations', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py index 09fb39534be1..862973544ab4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py @@ -35,6 +35,9 @@ class Trigger(Model): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str """ @@ -48,6 +51,7 @@ class Trigger(Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, } @@ -55,9 +59,10 @@ class Trigger(Model): 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} } - def __init__(self, *, additional_properties=None, description: str=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: super(Trigger, self).__init__(**kwargs) self.additional_properties = additional_properties self.description = description self.runtime_state = None + self.annotations = annotations self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py index ce46a4aac7e2..939624ae5042 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py @@ -32,6 +32,9 @@ class TumblingWindowTrigger(Trigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipeline: Required. Pipeline for which runs are created when an @@ -82,6 +85,7 @@ class TumblingWindowTrigger(Trigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py index bc3114f08edd..6856629c8b91 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py @@ -32,6 +32,9 @@ class TumblingWindowTrigger(Trigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipeline: Required. 
Pipeline for which runs are created when an @@ -82,6 +85,7 @@ class TumblingWindowTrigger(Trigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, @@ -94,8 +98,8 @@ class TumblingWindowTrigger(Trigger): 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, } - def __init__(self, *, pipeline, frequency, interval: int, start_time, max_concurrency: int, additional_properties=None, description: str=None, end_time=None, delay=None, retry_policy=None, depends_on=None, **kwargs) -> None: - super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + def __init__(self, *, pipeline, frequency, interval: int, start_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, end_time=None, delay=None, retry_policy=None, depends_on=None, **kwargs) -> None: + super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.pipeline = pipeline self.frequency = frequency self.interval = interval diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py new file mode 100644 index 000000000000..0d92d32c12b0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class ValidationActivity(ControlActivity): + """This activity verifies that an external resource exists. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param timeout: Specifies the timeout for the activity to run. If there is + no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 + week as default. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
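Since annotations is now threaded through the Trigger hierarchy, it can be supplied when building any trigger model. A sketch with placeholder names, assuming the existing PipelineReference and TriggerPipelineReference models:

from datetime import datetime
from azure.mgmt.datafactory.models import (
    PipelineReference, TriggerPipelineReference, TumblingWindowTrigger)

# Placeholder pipeline name and schedule; annotations is a free-form list of tags.
trigger = TumblingWindowTrigger(
    pipeline=TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='copyPipeline')),
    frequency='Hour',
    interval=1,
    start_time=datetime(2019, 6, 1),
    max_concurrency=4,
    annotations=['nightly-load', 'owner:data-platform'],
)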
+ :type timeout: object + :param sleep: A delay in seconds between validation attempts. If no value + is specified, 10 seconds will be used as the default. Type: integer (or + Expression with resultType integer). + :type sleep: object + :param minimum_size: Can be used if dataset points to a file. The file + must be greater than or equal in size to the value specified. Type: + integer (or Expression with resultType integer). + :type minimum_size: object + :param child_items: Can be used if dataset points to a folder. If set to + true, the folder must have at least one file. If set to false, the folder + must be empty. Type: boolean (or Expression with resultType boolean). + :type child_items: object + :param dataset: Required. Validation activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, + 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, + 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, + 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + } + + def __init__(self, **kwargs): + super(ValidationActivity, self).__init__(**kwargs) + self.timeout = kwargs.get('timeout', None) + self.sleep = kwargs.get('sleep', None) + self.minimum_size = kwargs.get('minimum_size', None) + self.child_items = kwargs.get('child_items', None) + self.dataset = kwargs.get('dataset', None) + self.type = 'Validation' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py new file mode 100644 index 000000000000..f4680400b447 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class ValidationActivity(ControlActivity): + """This activity verifies that an external resource exists. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
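A minimal sketch of the new ValidationActivity, using placeholder names: it waits for the referenced dataset's folder to contain at least one file, polling every 30 seconds for up to 10 minutes.

from azure.mgmt.datafactory.models import DatasetReference, ValidationActivity

# Placeholder dataset name; timeout follows the documented d.hh:mm:ss pattern.
validation = ValidationActivity(
    name='WaitForLandingFiles',
    dataset=DatasetReference(reference_name='LandingFolderDataset'),
    timeout='0.00:10:00',
    sleep=30,
    child_items=True,
)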
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param timeout: Specifies the timeout for the activity to run. If there is + no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 + week as default. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: object + :param sleep: A delay in seconds between validation attempts. If no value + is specified, 10 seconds will be used as the default. Type: integer (or + Expression with resultType integer). + :type sleep: object + :param minimum_size: Can be used if dataset points to a file. The file + must be greater than or equal in size to the value specified. Type: + integer (or Expression with resultType integer). + :type minimum_size: object + :param child_items: Can be used if dataset points to a folder. If set to + true, the folder must have at least one file. If set to false, the folder + must be empty. Type: boolean (or Expression with resultType boolean). + :type child_items: object + :param dataset: Required. Validation activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, + 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, + 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, + 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + } + + def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, sleep=None, minimum_size=None, child_items=None, **kwargs) -> None: + super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.timeout = timeout + self.sleep = sleep + self.minimum_size = minimum_size + self.child_items = child_items + self.dataset = dataset + self.type = 'Validation' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py index fafba164a752..6b5e8d0103f5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py @@ -29,7 +29,7 @@ class VerticaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. 
:type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py index 77caf915eaab..3aee3a5ae0f6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py @@ -29,7 +29,7 @@ class VerticaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py index 1670c0e9fc49..d0b642f15d38 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py @@ -27,6 +27,10 @@ class VerticaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class VerticaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py index 6be2edd35218..a1c4d755f2b4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py @@ -27,6 +27,10 @@ class VerticaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class VerticaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'VerticaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py new file mode 100644 index 000000000000..1c648c42c3e2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class WebHookActivity(ControlActivity): + """WebHook activity. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :ivar method: Required. Rest API method for target endpoint. Default + value: "POST" . + :vartype method: str + :param url: Required. WebHook activity target endpoint and path. Type: + string (or Expression with resultType string). + :type url: object + :param timeout: The timeout within which the webhook should be called + back. If there is no value specified, it defaults to 10 minutes. Type: + string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: str + :param headers: Represents the headers that will be sent to the request. 
+ For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). + :type body: object + :param authentication: Authentication method used for calling the + endpoint. + :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True, 'constant': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, + } + + method = "POST" + + def __init__(self, **kwargs): + super(WebHookActivity, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.timeout = kwargs.get('timeout', None) + self.headers = kwargs.get('headers', None) + self.body = kwargs.get('body', None) + self.authentication = kwargs.get('authentication', None) + self.type = 'WebHook' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py new file mode 100644 index 000000000000..40cdc6f732da --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class WebHookActivity(ControlActivity): + """WebHook activity. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. 
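A short sketch of the new WebHookActivity with placeholder values: the activity POSTs to the given endpoint and then waits, up to the timeout, for the endpoint to invoke the callback URI that the service includes in the request body.

from azure.mgmt.datafactory.models import WebHookActivity

# Placeholder endpoint; the HTTP method is fixed to POST by the model.
webhook = WebHookActivity(
    name='RequestApproval',
    url='https://contoso.example/api/approve',
    timeout='00:10:00',
    headers={'Content-Type': 'application/json'},
    body={'runId': 'placeholder'},
)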
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :ivar method: Required. Rest API method for target endpoint. Default + value: "POST" . + :vartype method: str + :param url: Required. WebHook activity target endpoint and path. Type: + string (or Expression with resultType string). + :type url: object + :param timeout: The timeout within which the webhook should be called + back. If there is no value specified, it defaults to 10 minutes. Type: + string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: str + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). + :type body: object + :param authentication: Authentication method used for calling the + endpoint. + :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True, 'constant': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, + } + + method = "POST" + + def __init__(self, *, name: str, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout: str=None, headers=None, body=None, authentication=None, **kwargs) -> None: + super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.url = url + self.timeout = timeout + self.headers = headers + self.body = body + self.authentication = authentication + self.type = 'WebHook' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py index cee3bd37409c..18fadba3f3ee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py @@ -29,7 +29,7 @@ class WebLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. 
Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py index 3afa3a1bcb05..3e491b0fac4d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py @@ -29,7 +29,7 @@ class WebLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py index 13bcbfbb62d7..c5d3a2a8f00a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py @@ -27,6 +27,10 @@ class WebSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str """ @@ -39,6 +43,7 @@ class WebSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py index 7c5ce29d3d26..684e1d4233cc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py @@ -27,6 +27,10 @@ class WebSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
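The same max_concurrent_connections property is being added across the copy sources in this patch; a one-line sketch on WebSource, with an arbitrary value:

from azure.mgmt.datafactory.models import WebSource

# Cap the number of simultaneous connections opened against the source store.
source = WebSource(max_concurrent_connections=4)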
:type type: str """ @@ -39,9 +43,10 @@ class WebSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, **kwargs) -> None: - super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'WebSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py index e9daa4ff7d2a..24973f577133 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py @@ -29,7 +29,7 @@ class XeroLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py index eb665519f4ea..433c65ade739 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py @@ -29,7 +29,7 @@ class XeroLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py index 4695780bf41b..a37852a5b419 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py @@ -27,6 +27,10 @@ class XeroSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,6 +46,7 @@ class XeroSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py index 8de950856bae..bbee6c6fa1f0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py @@ -27,6 +27,10 @@ class XeroSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class XeroSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'XeroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py index 997efb5fc242..fe34dff77ea9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py @@ -29,7 +29,7 @@ class ZohoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py index c05d018146d6..f82f6221592b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py @@ -29,7 +29,7 @@ class ZohoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py index 248d50d55297..274c6fc09f19 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py @@ -27,6 +27,10 @@ class ZohoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class ZohoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py index 5f0547d9465a..6d7dc29bdf8a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py @@ -27,6 +27,10 @@ class ZohoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
@@ -42,11 +46,12 @@ class ZohoSource(CopySource):
         'additional_properties': {'key': '', 'type': '{object}'},
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'query': {'key': 'query', 'type': 'object'},
     }
 
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None:
-        super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs)
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
         self.query = query
         self.type = 'ZohoSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py
index 080e8c87ba18..4a648d96586c 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py
@@ -105,3 +105,75 @@ def get_feature_value(
 
         return deserialized
     get_feature_value.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue'}
+
+    def get_feature_value_by_factory(
+            self, resource_group_name, factory_name, feature_name=None, feature_type=None, custom_headers=None, raw=False, **operation_config):
+        """Get exposure control feature for specific factory.
+
+        :param resource_group_name: The resource group name.
+        :type resource_group_name: str
+        :param factory_name: The factory name.
+        :type factory_name: str
+        :param feature_name: The feature name.
+        :type feature_name: str
+        :param feature_type: The feature type.
+        :type feature_type: str
+        :param dict custom_headers: headers that will be added to the request
+        :param bool raw: returns the direct response alongside the
+         deserialized response
+        :param operation_config: :ref:`Operation configuration
+         overrides<msrest:optionsforoperations>`.
+        :return: ExposureControlResponse or ClientRawResponse if raw=true
+        :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse or
+         ~msrest.pipeline.ClientRawResponse
+        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
+        """
+        exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type)
+
+        # Construct URL
+        url = self.get_feature_value_by_factory.metadata['url']
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$')
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+        if self.config.generate_client_request_id:
+            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
+        if custom_headers:
+            header_parameters.update(custom_headers)
+        if self.config.accept_language is not None:
+            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
+
+        # Construct body
+        body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest')
+
+        # Construct and send request
+        request = self._client.post(url, query_parameters, header_parameters, body_content)
+        response = self._client.send(request, stream=False, **operation_config)
+
+        if response.status_code not in [200]:
+            exp = CloudError(response)
+            exp.request_id = response.headers.get('x-ms-request-id')
+            raise exp
+
+        deserialized = None
+
+        if response.status_code == 200:
+            deserialized = self._deserialize('ExposureControlResponse', response)
+
+        if raw:
+            client_raw_response = ClientRawResponse(deserialized, response)
+            return client_raw_response
+
+        return deserialized
+    get_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue'}
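A sketch of how the new factory-scoped exposure control operation might be called through DataFactoryManagementClient; the credentials, resource names and the feature being queried are placeholders and are not part of this patch.

from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.datafactory import DataFactoryManagementClient

# Placeholder credentials and identifiers; substitute real values.
credentials = ServicePrincipalCredentials(
    client_id="<client-id>", secret="<client-secret>", tenant="<tenant-id>")
client = DataFactoryManagementClient(credentials, "<subscription-id>")

# Query an exposure control feature scoped to a single factory.
result = client.exposure_control.get_feature_value_by_factory(
    resource_group_name="myResourceGroup",            # placeholder
    factory_name="myDataFactory",                     # placeholder
    feature_name="ADFIntegrationRuntimeSharingRbac",  # illustrative feature name
    feature_type="Feature")
print(result.feature_name, result.value)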
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py
index 8a01ce6a8408..343396e705ac 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py
@@ -312,7 +312,7 @@ def delete(
     delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'}
 
     def create_run(
-            self, resource_group_name, factory_name, pipeline_name, reference_pipeline_run_id=None, parameters=None, custom_headers=None, raw=False, **operation_config):
+            self, resource_group_name, factory_name, pipeline_name, reference_pipeline_run_id=None, is_recovery=None, start_activity_name=None, parameters=None, custom_headers=None, raw=False, **operation_config):
         """Creates a run of a pipeline.
 
         :param resource_group_name: The resource group name.
@@ -325,6 +325,13 @@ def create_run(
          ID is specified the parameters of the specified run will be used to
          create a new run.
         :type reference_pipeline_run_id: str
+        :param is_recovery: Recovery mode flag. If recovery mode is set to
+         true, the specified referenced pipeline run and the new run will be
+         grouped under the same groupId.
+        :type is_recovery: bool
+        :param start_activity_name: In recovery mode, the rerun will start
+         from this activity. If not specified, all activities will run.
+        :type start_activity_name: str
         :param parameters: Parameters of the pipeline run. These parameters
          will be used only if the runId is not specified.
         :type parameters: dict[str, object]
@@ -353,6 +360,10 @@ def create_run(
         query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
         if reference_pipeline_run_id is not None:
             query_parameters['referencePipelineRunId'] = self._serialize.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str')
+        if is_recovery is not None:
+            query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool')
+        if start_activity_name is not None:
+            query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str')
 
         # Construct headers
         header_parameters = {}
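The two new query parameters let a failed pipeline run be rerun in recovery mode. A minimal sketch, reusing a DataFactoryManagementClient configured as in the exposure control example above; the resource names, pipeline name, run id and activity name are placeholders.

# Rerun a failed pipeline run in recovery mode. The new run is grouped with
# the referenced run under the same groupId, and execution starts from the
# named activity instead of rerunning the whole pipeline.
run = client.pipelines.create_run(
    resource_group_name="myResourceGroup",        # placeholder
    factory_name="myDataFactory",                 # placeholder
    pipeline_name="myPipeline",                   # placeholder
    reference_pipeline_run_id="<failed-run-id>",  # run being recovered
    is_recovery=True,
    start_activity_name="CopyFromXero")           # placeholder activity name
print(run.run_id)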
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py
index 981739e4ff95..a39916c162ce 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py
@@ -9,5 +9,5 @@
 # regenerated.
 # --------------------------------------------------------------------------
 
-VERSION = "0.7.0"
+VERSION = "1.0.0"
diff --git a/sdk/datafactory/azure-mgmt-datafactory/setup.py b/sdk/datafactory/azure-mgmt-datafactory/setup.py
index 4b3ca4777aca..bc211c4c2c5f 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/setup.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/setup.py
@@ -53,6 +53,7 @@
     version=version,
     description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
     long_description=readme + '\n\n' + history,
+    long_description_content_type='text/x-rst',
     license='MIT License',
     author='Microsoft Corporation',
     author_email='azpysdkhelp@microsoft.com',
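With the version string bumped to 1.0.0, the regenerated surface ships as a new release of azure-mgmt-datafactory. A quick way for a consumer to confirm which release is installed; this check is illustrative and not part of the patch.

# Confirm the installed release matches the bumped VERSION string.
import pkg_resources
print(pkg_resources.get_distribution("azure-mgmt-datafactory").version)  # e.g. "1.0.0"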